repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
drslump/pyshould
|
pyshould/dsl.py
|
any_of
|
python
|
def any_of(value, *args):
    """At least one of the items in value should match."""
    # Extra positional arguments are folded into a single tuple with `value`,
    # so any_of(a, b, c) behaves like any_of((a, b, c)).
    if len(args):
        value = (value,) + args
    return ExpectationAny(value)
|
At least one of the items in value should match
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/dsl.py#L33-L39
| null |
"""
Define the names making up the domain specific language
"""
from pyshould.expectation import (
Expectation, ExpectationNot,
ExpectationAll, ExpectationAny,
ExpectationNone, OPERATOR
)
from pyshould.dumper import Dumper
__author__ = "Ivan -DrSlump- Montes"
__email__ = "drslump@pollinimini.net"
__license__ = "MIT"
# Create instances to be used with the overloaded | operator
should = Expectation(deferred=True, factory=True)
should_not = ExpectationNot(deferred=True, factory=True)
should_all = ExpectationAll(deferred=True, factory=True)
should_any = ExpectationAny(deferred=True, factory=True)
should_none = ExpectationNone(deferred=True, factory=True)
should_either = Expectation(deferred=True, factory=True, def_op=OPERATOR.OR)
# Dumper instance for debugging mocks
dumper = Dumper()
def it(value):
    """ Wraps a value in an expectation so assertions can be chained on it """
    return Expectation(value)
def all_of(value, *args):
    """ All the items in value should match """
    # Accept either a single iterable or a varargs list of items.
    if args:
        value = (value,) + args
    return ExpectationAll(value)
def none_of(value, *args):
    """ None of the items in value should match """
    # Fold varargs into a tuple with the first value when present.
    items = (value,) + args if args else value
    return ExpectationNone(items)
|
drslump/pyshould
|
pyshould/dsl.py
|
all_of
|
python
|
def all_of(value, *args):
    """All the items in value should match."""
    # Extra positional arguments are folded into a single tuple with `value`,
    # so all_of(a, b, c) behaves like all_of((a, b, c)).
    if len(args):
        value = (value,) + args
    return ExpectationAll(value)
|
All the items in value should match
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/dsl.py#L42-L48
| null |
"""
Define the names making up the domain specific language
"""
from pyshould.expectation import (
Expectation, ExpectationNot,
ExpectationAll, ExpectationAny,
ExpectationNone, OPERATOR
)
from pyshould.dumper import Dumper
__author__ = "Ivan -DrSlump- Montes"
__email__ = "drslump@pollinimini.net"
__license__ = "MIT"
# Create instances to be used with the overloaded | operator
should = Expectation(deferred=True, factory=True)
should_not = ExpectationNot(deferred=True, factory=True)
should_all = ExpectationAll(deferred=True, factory=True)
should_any = ExpectationAny(deferred=True, factory=True)
should_none = ExpectationNone(deferred=True, factory=True)
should_either = Expectation(deferred=True, factory=True, def_op=OPERATOR.OR)
# Dumper instance for debugging mocks
dumper = Dumper()
def it(value):
""" Wraps a value in an expectation """
return Expectation(value)
def any_of(value, *args):
""" At least one of the items in value should match """
if len(args):
value = (value,) + args
return ExpectationAny(value)
def none_of(value, *args):
""" None of the items in value should match """
if len(args):
value = (value,) + args
return ExpectationNone(value)
|
drslump/pyshould
|
pyshould/dsl.py
|
none_of
|
python
|
def none_of(value, *args):
    """None of the items in value should match."""
    # Extra positional arguments are folded into a single tuple with `value`,
    # so none_of(a, b, c) behaves like none_of((a, b, c)).
    if len(args):
        value = (value,) + args
    return ExpectationNone(value)
|
None of the items in value should match
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/dsl.py#L51-L57
| null |
"""
Define the names making up the domain specific language
"""
from pyshould.expectation import (
Expectation, ExpectationNot,
ExpectationAll, ExpectationAny,
ExpectationNone, OPERATOR
)
from pyshould.dumper import Dumper
__author__ = "Ivan -DrSlump- Montes"
__email__ = "drslump@pollinimini.net"
__license__ = "MIT"
# Create instances to be used with the overloaded | operator
should = Expectation(deferred=True, factory=True)
should_not = ExpectationNot(deferred=True, factory=True)
should_all = ExpectationAll(deferred=True, factory=True)
should_any = ExpectationAny(deferred=True, factory=True)
should_none = ExpectationNone(deferred=True, factory=True)
should_either = Expectation(deferred=True, factory=True, def_op=OPERATOR.OR)
# Dumper instance for debugging mocks
dumper = Dumper()
def it(value):
""" Wraps a value in an expectation """
return Expectation(value)
def any_of(value, *args):
""" At least one of the items in value should match """
if len(args):
value = (value,) + args
return ExpectationAny(value)
def all_of(value, *args):
""" All the items in value should match """
if len(args):
value = (value,) + args
return ExpectationAll(value)
|
rmohr/static3
|
static.py
|
iter_and_close
|
python
|
def iter_and_close(file_like, block_size):
    """Yield file contents by block then close the file.

    The original implementation raised and caught ``StopIteration`` as
    control flow and only closed the file on normal exhaustion.  Closing
    in a ``finally`` guarantees the file is released even if the consumer
    abandons the generator early (GeneratorExit) or ``read()`` raises.
    """
    try:
        while True:
            block = file_like.read(block_size)
            if not block:
                break
            yield block
    finally:
        file_like.close()
|
Yield file contents by block then close the file.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L239-L250
| null |
#!/usr/bin/env python
"""
Copyright (C) 2012 Roman Mohr <roman@fenkhuber.at>
"""
"""static - A stupidly simple WSGI way to serve static (or mixed) content.
(See the docstrings of the various functions and classes.)
Copyright (C) 2006-2009 Luke Arno - http://lukearno.com/
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to:
The Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
Luke Arno can be found at http://lukearno.com/
"""
import mimetypes
import email.utils as rfc822
import time
import string
import sys
from os import path, stat
from wsgiref import util
from wsgiref.headers import Headers
from wsgiref.simple_server import make_server
from optparse import OptionParser
try:
from pkg_resources import resource_filename, Requirement
except:
pass
try:
import kid
except:
pass
try:
from genshi.template import MarkupTemplate
except:
pass
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
class MagicError(Exception):
pass
def _encode(string, encoding):
if sys.version_info[0] > 2:
return string.encode(encoding=encoding, errors='strict')
else:
if type(u('')) == type(string):
string = string.encode(encoding)
return string
def _decode(string, encoding):
if sys.version_info[0] > 2:
return string.decode(encoding=encoding, errors='strict')
else:
return string
def _open(filename, encoding):
if sys.version_info[0] > 2:
return open(filename, 'r', encoding=encoding, errors='strict')
else:
return open(filename, 'rb')
class StatusApp:
    """Used by WSGI apps to return some HTTP status.

    Instances are callable WSGI applications responding with
    ``self.status`` and a plain-text body of ``self.message``.
    """
    def __init__(self, status, message=None, encoding=sys.getdefaultencoding()):
        self.status = status
        self.encoding = encoding
        # Default the body to the status line itself (e.g. "404 Not Found").
        if message is None:
            self.message = status
        else:
            self.message = message
    def __call__(self, environ, start_response, headers=None):
        """WSGI entry point; *headers* is an optional list of extra headers.

        Fix: the original used a mutable default (``headers=[]``); the shared
        list was mutated by ``Headers(...).add_header`` on every call made
        without explicit headers, accumulating duplicate Content-type headers
        across requests.
        """
        if headers is None:
            headers = []
        if self.message:
            Headers(headers).add_header('Content-type', 'text/plain')
        start_response(self.status, headers)
        if environ['REQUEST_METHOD'] == 'HEAD':
            return [_encode("", self.encoding)]
        else:
            return [_encode(self.message, self.encoding)]
class Cling(object):
"""A stupidly simple way to serve static content via WSGI.
Serve the file of the same path as PATH_INFO in self.datadir.
Look up the Content-type in self.content_types by extension
or use 'text/plain' if the extension is not found.
Serve up the contents of the file or delegate to self.not_found.
"""
block_size = 16 * 4096
index_file = 'index.html'
not_found = StatusApp('404 Not Found')
not_modified = StatusApp('304 Not Modified', "")
moved_permanently = StatusApp('301 Moved Permanently')
method_not_allowed = StatusApp('405 Method Not Allowed')
def __init__(self, root, **kw):
"""Just set the root and any other attribs passes via **kw."""
self.root = root
self.encoding = sys.getdefaultencoding()
for k, v in kw.items():
setattr(self, k, v)
def __call__(self, environ, start_response):
"""Respond to a request when called in the usual WSGI way."""
if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
headers = [('Allow', 'GET, HEAD')]
return self.method_not_allowed(environ, start_response, headers)
path_info = environ.get('PATH_INFO', '')
full_path = self._full_path(path_info)
if not self._is_under_root(full_path):
return self.not_found(environ, start_response)
if path.isdir(full_path):
if full_path[-1] != '/' or full_path == self.root:
location = util.request_uri(environ, include_query=False) + '/'
if environ.get('QUERY_STRING'):
location += '?' + environ.get('QUERY_STRING')
headers = [('Location', location)]
return self.moved_permanently(environ, start_response, headers)
else:
full_path = self._full_path(path_info + self.index_file)
prezipped = ('gzip' in environ.get('HTTP_ACCEPT_ENCODING', [])
and path.exists(full_path + '.gz'))
if prezipped:
full_path += '.gz'
content_type = self._guess_type(full_path)
try:
etag, last_modified = self._conditions(full_path, environ)
headers = [('Date', rfc822.formatdate(time.time())),
('Last-Modified', last_modified),
('ETag', etag)]
if_modified = environ.get('HTTP_IF_MODIFIED_SINCE')
if if_modified and (rfc822.parsedate(if_modified)
>= rfc822.parsedate(last_modified)):
return self.not_modified(environ, start_response, headers)
if_none = environ.get('HTTP_IF_NONE_MATCH')
if if_none and (if_none == '*' or etag in if_none):
return self.not_modified(environ, start_response, headers)
file_like = self._file_like(full_path)
headers.append(('Content-Type', content_type))
if prezipped:
headers.extend([('Content-Encoding', 'gzip'),
('Vary', 'Accept-Encoding')])
self._add_headers(headers, path_info, content_type)
start_response("200 OK", headers)
if environ['REQUEST_METHOD'] == 'GET':
return self._body(full_path, environ, file_like)
else:
return [b'']
except (IOError, OSError) as e:
print(e)
return self.not_found(environ, start_response)
def _full_path(self, path_info):
"""Return the full path from which to read."""
return self.root + path_info
def _is_under_root(self, full_path):
"""Guard against arbitrary file retrieval."""
if (path.abspath(full_path) + path.sep)\
.startswith(path.abspath(self.root) + path.sep):
return True
else:
return False
def _guess_type(self, full_path):
"""Guess the mime type using the mimetypes module."""
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return a tuple of etag, last_modified by mtime from stat."""
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _add_headers(self, headers, path, content_type):
DEFAULT = '__static_no_match__'
CONFIG_ITEMS = ['prefix', 'type', 'ext']
for config in getattr(self, 'headers', []):
if path.startswith(config.get('prefix', DEFAULT)) or \
content_type == config.get('type', DEFAULT) or \
path.endswith(config.get('ext', DEFAULT)):
for key, value in config.items():
if key not in CONFIG_ITEMS:
headers.append((key, value))
def _file_like(self, full_path):
"""Return the appropriate file object."""
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
def cling_wrap(package_name, dir_name, **kw):
"""Return a Cling that serves from the given package and dir_name.
This uses pkg_resources.resource_filename which is not the
recommended way, since it extracts the files.
I think this works fine unless you have some _very_ serious
requirements for static content, in which case you probably
shouldn't be serving it through a WSGI app, IMHO. YMMV.
"""
resource = Requirement.parse(package_name)
return Cling(resource_filename(resource, dir_name), **kw)
class Shock(Cling):
"""A stupidly simple way to serve up mixed content.
Serves static content just like Cling (it's superclass)
except that it process content with the first matching
magic from self.magics if any apply.
See Cling and classes with "Magic" in their names in this module.
If you are using Shock with the StringMagic class for instance:
shock = Shock('/data', magics=[StringMagic(food='cheese')])
Let's say you have a file called /data/foo.txt.stp containing one line:
"I love to eat $food!"
When you do a GET on /foo.txt you will see this in your browser:
"I love to eat cheese!"
This is really nice if you have a color variable in your css files or
something trivial like that. It seems silly to create or change a
handful of objects for a couple of dynamic bits of text.
"""
magics = ()
def _match_magic(self, full_path):
"""Return the first magic that matches this path or None."""
for magic in self.magics:
if magic.matches(full_path):
return magic
def _full_path(self, path_info):
"""Return the full path from which to read."""
full_path = self.root + path_info
if path.exists(full_path):
return full_path
else:
for magic in self.magics:
if path.exists(magic.new_path(full_path)):
return magic.new_path(full_path)
else:
return full_path
def _guess_type(self, full_path):
"""Guess the mime type magically or using the mimetypes module."""
magic = self._match_magic(full_path)
if magic is not None:
return (mimetypes.guess_type(magic.old_path(full_path))[0]
or 'text/plain')
else:
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return Etag and Last-Modified values defaults to now for both."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.conditions(full_path, environ)
else:
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _file_like(self, full_path):
"""Return the appropriate file object."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.file_like(full_path, self.encoding)
else:
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
magic = self._match_magic(full_path)
if magic is not None:
return [_encode(s, self.encoding) for s in magic.body(environ,
file_like)]
else:
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
class BaseMagic(object):
"""Base class for magic file handling.
Really a do nothing if you were to use this directly.
In a strait forward case you would just override .extension and body().
(See StringMagic in this module for a simple example of subclassing.)
In a more complex case you may need to override many or all methods.
"""
extension = ''
def exists(self, full_path):
"""Check that self.new_path(full_path) exists."""
if path.exists(self.new_path(full_path)):
return self.new_path(full_path)
def new_path(self, full_path):
"""Add the self.extension to the path."""
return full_path + self.extension
def old_path(self, full_path):
"""Remove self.extension from path or raise MagicError."""
if self.matches(full_path):
return full_path[:-len(self.extension)]
else:
raise MagicError("Path does not match this magic.")
def matches(self, full_path):
"""Check that path ends with self.extension."""
if full_path.endswith(self.extension):
return full_path
def conditions(self, full_path, environ):
"""Return Etag and Last-Modified values (based on mtime)."""
mtime = int(time.time())
return str(mtime), rfc822.formatdate(mtime)
def file_like(self, full_path, encoding):
"""Return a file object for path."""
return _open(full_path, encoding)
def body(self, environ, file_like):
"""Return an iterator over the body of the response."""
return [file_like.read()]
class StringMagic(BaseMagic):
"""Magic to replace variables in file contents using string.Template.
Using this requires Python2.4.
"""
extension = '.stp'
safe = False
def __init__(self, **variables):
"""Keyword arguments populate self.variables."""
self.variables = variables
def body(self, environ, file_like):
"""Pass environ and self.variables in to template.
self.variables overrides environ so that suprises in environ don't
cause unexpected output if you are passing a value in explicitly.
"""
variables = environ.copy()
variables.update(self.variables)
template = string.Template(file_like.read())
if self.safe is True:
return [template.safe_substitute(variables)]
else:
return [template.substitute(variables)]
class KidMagic(StringMagic):
"""Like StringMagic only using the Kid templating language.
Using this requires Kid: http://kid.lesscode.org/
"""
extension = '.kid'
def body(self, environ, full_path):
"""Pass environ and **self.variables into the template."""
template = kid.Template(file=full_path,
environ=environ,
**self.variables)
return [template.serialize()]
class GenshiMagic(StringMagic):
"""Like StringMagic only using the Genshi templating language.
Using this requires Genshi
"""
extension = '.genshi'
def body(self, environ, full_path):
"""Pass environ and **self.variables into the template."""
template = MarkupTemplate(full_path.read())
variables = self.variables.copy()
variables["environ"] = environ
return [template.generate(**variables)
.render('html', doctype='html')]
def command():
parser = OptionParser(usage="%prog DIR [HOST][:][PORT]",
version="static 0.3.6")
options, args = parser.parse_args()
if len(args) in (1, 2):
if len(args) == 2:
parts = args[1].split(":")
if len(parts) == 1:
host = parts[0]
port = None
elif len(parts) == 2:
host, port = parts
else:
sys.exit("Invalid host:port specification.")
elif len(args) == 1:
host, port = None, None
if not host:
host = '0.0.0.0'
if not port:
port = 9999
try:
port = int(port)
except:
sys.exit("Invalid host:port specification.")
app = Cling(args[0])
try:
make_server(host, port, app).serve_forever()
except KeyboardInterrupt:
print("Cio, baby!")
except:
sys.exit("Problem initializing server.")
else:
parser.print_help(sys.stderr)
sys.exit(1)
def test():
from wsgiref.validate import validator
magics = (StringMagic(title="String Test"),
KidMagic(title="Kid Test"), GenshiMagic(title="Genshi Test"))
#app = Shock('testdata/pub', magics=magics)
app = Cling('testdata/pub')
try:
make_server('localhost', 9999, validator(app)).serve_forever()
except KeyboardInterrupt:
print("Ciao, baby!")
if __name__ == '__main__':
test()
|
rmohr/static3
|
static.py
|
cling_wrap
|
python
|
def cling_wrap(package_name, dir_name, **kw):
resource = Requirement.parse(package_name)
return Cling(resource_filename(resource, dir_name), **kw)
|
Return a Cling that serves from the given package and dir_name.
This uses pkg_resources.resource_filename which is not the
recommended way, since it extracts the files.
I think this works fine unless you have some _very_ serious
requirements for static content, in which case you probably
shouldn't be serving it through a WSGI app, IMHO. YMMV.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L253-L264
| null |
#!/usr/bin/env python
"""
Copyright (C) 2012 Roman Mohr <roman@fenkhuber.at>
"""
"""static - A stupidly simple WSGI way to serve static (or mixed) content.
(See the docstrings of the various functions and classes.)
Copyright (C) 2006-2009 Luke Arno - http://lukearno.com/
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to:
The Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
Luke Arno can be found at http://lukearno.com/
"""
import mimetypes
import email.utils as rfc822
import time
import string
import sys
from os import path, stat
from wsgiref import util
from wsgiref.headers import Headers
from wsgiref.simple_server import make_server
from optparse import OptionParser
try:
from pkg_resources import resource_filename, Requirement
except:
pass
try:
import kid
except:
pass
try:
from genshi.template import MarkupTemplate
except:
pass
if sys.version < '3':
import codecs
def u(x):
return codecs.unicode_escape_decode(x)[0]
else:
def u(x):
return x
class MagicError(Exception):
pass
def _encode(string, encoding):
if sys.version_info[0] > 2:
return string.encode(encoding=encoding, errors='strict')
else:
if type(u('')) == type(string):
string = string.encode(encoding)
return string
def _decode(string, encoding):
if sys.version_info[0] > 2:
return string.decode(encoding=encoding, errors='strict')
else:
return string
def _open(filename, encoding):
if sys.version_info[0] > 2:
return open(filename, 'r', encoding=encoding, errors='strict')
else:
return open(filename, 'rb')
class StatusApp:
"""Used by WSGI apps to return some HTTP status."""
def __init__(self, status, message=None, encoding=sys.getdefaultencoding()):
self.status = status
self.encoding = encoding
if message is None:
self.message = status
else:
self.message = message
def __call__(self, environ, start_response, headers=[]):
if self.message:
Headers(headers).add_header('Content-type', 'text/plain')
start_response(self.status, headers)
if environ['REQUEST_METHOD'] == 'HEAD':
return [_encode("", self.encoding)]
else:
return [_encode(self.message, self.encoding)]
class Cling(object):
"""A stupidly simple way to serve static content via WSGI.
Serve the file of the same path as PATH_INFO in self.datadir.
Look up the Content-type in self.content_types by extension
or use 'text/plain' if the extension is not found.
Serve up the contents of the file or delegate to self.not_found.
"""
block_size = 16 * 4096
index_file = 'index.html'
not_found = StatusApp('404 Not Found')
not_modified = StatusApp('304 Not Modified', "")
moved_permanently = StatusApp('301 Moved Permanently')
method_not_allowed = StatusApp('405 Method Not Allowed')
def __init__(self, root, **kw):
"""Just set the root and any other attribs passes via **kw."""
self.root = root
self.encoding = sys.getdefaultencoding()
for k, v in kw.items():
setattr(self, k, v)
def __call__(self, environ, start_response):
"""Respond to a request when called in the usual WSGI way."""
if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
headers = [('Allow', 'GET, HEAD')]
return self.method_not_allowed(environ, start_response, headers)
path_info = environ.get('PATH_INFO', '')
full_path = self._full_path(path_info)
if not self._is_under_root(full_path):
return self.not_found(environ, start_response)
if path.isdir(full_path):
if full_path[-1] != '/' or full_path == self.root:
location = util.request_uri(environ, include_query=False) + '/'
if environ.get('QUERY_STRING'):
location += '?' + environ.get('QUERY_STRING')
headers = [('Location', location)]
return self.moved_permanently(environ, start_response, headers)
else:
full_path = self._full_path(path_info + self.index_file)
prezipped = ('gzip' in environ.get('HTTP_ACCEPT_ENCODING', [])
and path.exists(full_path + '.gz'))
if prezipped:
full_path += '.gz'
content_type = self._guess_type(full_path)
try:
etag, last_modified = self._conditions(full_path, environ)
headers = [('Date', rfc822.formatdate(time.time())),
('Last-Modified', last_modified),
('ETag', etag)]
if_modified = environ.get('HTTP_IF_MODIFIED_SINCE')
if if_modified and (rfc822.parsedate(if_modified)
>= rfc822.parsedate(last_modified)):
return self.not_modified(environ, start_response, headers)
if_none = environ.get('HTTP_IF_NONE_MATCH')
if if_none and (if_none == '*' or etag in if_none):
return self.not_modified(environ, start_response, headers)
file_like = self._file_like(full_path)
headers.append(('Content-Type', content_type))
if prezipped:
headers.extend([('Content-Encoding', 'gzip'),
('Vary', 'Accept-Encoding')])
self._add_headers(headers, path_info, content_type)
start_response("200 OK", headers)
if environ['REQUEST_METHOD'] == 'GET':
return self._body(full_path, environ, file_like)
else:
return [b'']
except (IOError, OSError) as e:
print(e)
return self.not_found(environ, start_response)
def _full_path(self, path_info):
"""Return the full path from which to read."""
return self.root + path_info
def _is_under_root(self, full_path):
"""Guard against arbitrary file retrieval."""
if (path.abspath(full_path) + path.sep)\
.startswith(path.abspath(self.root) + path.sep):
return True
else:
return False
def _guess_type(self, full_path):
"""Guess the mime type using the mimetypes module."""
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return a tuple of etag, last_modified by mtime from stat."""
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _add_headers(self, headers, path, content_type):
DEFAULT = '__static_no_match__'
CONFIG_ITEMS = ['prefix', 'type', 'ext']
for config in getattr(self, 'headers', []):
if path.startswith(config.get('prefix', DEFAULT)) or \
content_type == config.get('type', DEFAULT) or \
path.endswith(config.get('ext', DEFAULT)):
for key, value in config.items():
if key not in CONFIG_ITEMS:
headers.append((key, value))
def _file_like(self, full_path):
"""Return the appropriate file object."""
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
def iter_and_close(file_like, block_size):
"""Yield file contents by block then close the file."""
while 1:
try:
block = file_like.read(block_size)
if block:
yield block
else:
raise StopIteration
except StopIteration:
file_like.close()
return
class Shock(Cling):
"""A stupidly simple way to serve up mixed content.
Serves static content just like Cling (it's superclass)
except that it process content with the first matching
magic from self.magics if any apply.
See Cling and classes with "Magic" in their names in this module.
If you are using Shock with the StringMagic class for instance:
shock = Shock('/data', magics=[StringMagic(food='cheese')])
Let's say you have a file called /data/foo.txt.stp containing one line:
"I love to eat $food!"
When you do a GET on /foo.txt you will see this in your browser:
"I love to eat cheese!"
This is really nice if you have a color variable in your css files or
something trivial like that. It seems silly to create or change a
handful of objects for a couple of dynamic bits of text.
"""
magics = ()
def _match_magic(self, full_path):
"""Return the first magic that matches this path or None."""
for magic in self.magics:
if magic.matches(full_path):
return magic
def _full_path(self, path_info):
"""Return the full path from which to read."""
full_path = self.root + path_info
if path.exists(full_path):
return full_path
else:
for magic in self.magics:
if path.exists(magic.new_path(full_path)):
return magic.new_path(full_path)
else:
return full_path
def _guess_type(self, full_path):
"""Guess the mime type magically or using the mimetypes module."""
magic = self._match_magic(full_path)
if magic is not None:
return (mimetypes.guess_type(magic.old_path(full_path))[0]
or 'text/plain')
else:
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return Etag and Last-Modified values defaults to now for both."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.conditions(full_path, environ)
else:
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _file_like(self, full_path):
"""Return the appropriate file object."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.file_like(full_path, self.encoding)
else:
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
magic = self._match_magic(full_path)
if magic is not None:
return [_encode(s, self.encoding) for s in magic.body(environ,
file_like)]
else:
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
class BaseMagic(object):
"""Base class for magic file handling.
Really a do nothing if you were to use this directly.
In a strait forward case you would just override .extension and body().
(See StringMagic in this module for a simple example of subclassing.)
In a more complex case you may need to override many or all methods.
"""
extension = ''
def exists(self, full_path):
"""Check that self.new_path(full_path) exists."""
if path.exists(self.new_path(full_path)):
return self.new_path(full_path)
def new_path(self, full_path):
"""Add the self.extension to the path."""
return full_path + self.extension
def old_path(self, full_path):
"""Remove self.extension from path or raise MagicError."""
if self.matches(full_path):
return full_path[:-len(self.extension)]
else:
raise MagicError("Path does not match this magic.")
def matches(self, full_path):
"""Check that path ends with self.extension."""
if full_path.endswith(self.extension):
return full_path
def conditions(self, full_path, environ):
"""Return Etag and Last-Modified values (based on mtime)."""
mtime = int(time.time())
return str(mtime), rfc822.formatdate(mtime)
def file_like(self, full_path, encoding):
"""Return a file object for path."""
return _open(full_path, encoding)
def body(self, environ, file_like):
"""Return an iterator over the body of the response."""
return [file_like.read()]
class StringMagic(BaseMagic):
"""Magic to replace variables in file contents using string.Template.
Using this requires Python2.4.
"""
extension = '.stp'
safe = False
def __init__(self, **variables):
"""Keyword arguments populate self.variables."""
self.variables = variables
def body(self, environ, file_like):
"""Pass environ and self.variables in to template.
self.variables overrides environ so that suprises in environ don't
cause unexpected output if you are passing a value in explicitly.
"""
variables = environ.copy()
variables.update(self.variables)
template = string.Template(file_like.read())
if self.safe is True:
return [template.safe_substitute(variables)]
else:
return [template.substitute(variables)]
class KidMagic(StringMagic):
"""Like StringMagic only using the Kid templating language.
Using this requires Kid: http://kid.lesscode.org/
"""
extension = '.kid'
def body(self, environ, full_path):
"""Pass environ and **self.variables into the template."""
template = kid.Template(file=full_path,
environ=environ,
**self.variables)
return [template.serialize()]
class GenshiMagic(StringMagic):
"""Like StringMagic only using the Genshi templating language.
Using this requires Genshi
"""
extension = '.genshi'
def body(self, environ, full_path):
"""Pass environ and **self.variables into the template."""
template = MarkupTemplate(full_path.read())
variables = self.variables.copy()
variables["environ"] = environ
return [template.generate(**variables)
.render('html', doctype='html')]
def command():
parser = OptionParser(usage="%prog DIR [HOST][:][PORT]",
version="static 0.3.6")
options, args = parser.parse_args()
if len(args) in (1, 2):
if len(args) == 2:
parts = args[1].split(":")
if len(parts) == 1:
host = parts[0]
port = None
elif len(parts) == 2:
host, port = parts
else:
sys.exit("Invalid host:port specification.")
elif len(args) == 1:
host, port = None, None
if not host:
host = '0.0.0.0'
if not port:
port = 9999
try:
port = int(port)
except:
sys.exit("Invalid host:port specification.")
app = Cling(args[0])
try:
make_server(host, port, app).serve_forever()
except KeyboardInterrupt:
print("Cio, baby!")
except:
sys.exit("Problem initializing server.")
else:
parser.print_help(sys.stderr)
sys.exit(1)
def test():
from wsgiref.validate import validator
magics = (StringMagic(title="String Test"),
KidMagic(title="Kid Test"), GenshiMagic(title="Genshi Test"))
#app = Shock('testdata/pub', magics=magics)
app = Cling('testdata/pub')
try:
make_server('localhost', 9999, validator(app)).serve_forever()
except KeyboardInterrupt:
print("Ciao, baby!")
if __name__ == '__main__':
test()
|
rmohr/static3
|
static.py
|
Cling._is_under_root
|
python
|
def _is_under_root(self, full_path):
if (path.abspath(full_path) + path.sep)\
.startswith(path.abspath(self.root) + path.sep):
return True
else:
return False
|
Guard against arbitrary file retrieval.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L200-L206
| null |
class Cling(object):
"""A stupidly simple way to serve static content via WSGI.
Serve the file of the same path as PATH_INFO in self.datadir.
Look up the Content-type in self.content_types by extension
or use 'text/plain' if the extension is not found.
Serve up the contents of the file or delegate to self.not_found.
"""
block_size = 16 * 4096
index_file = 'index.html'
not_found = StatusApp('404 Not Found')
not_modified = StatusApp('304 Not Modified', "")
moved_permanently = StatusApp('301 Moved Permanently')
method_not_allowed = StatusApp('405 Method Not Allowed')
def __init__(self, root, **kw):
"""Just set the root and any other attribs passes via **kw."""
self.root = root
self.encoding = sys.getdefaultencoding()
for k, v in kw.items():
setattr(self, k, v)
def __call__(self, environ, start_response):
"""Respond to a request when called in the usual WSGI way."""
if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
headers = [('Allow', 'GET, HEAD')]
return self.method_not_allowed(environ, start_response, headers)
path_info = environ.get('PATH_INFO', '')
full_path = self._full_path(path_info)
if not self._is_under_root(full_path):
return self.not_found(environ, start_response)
if path.isdir(full_path):
if full_path[-1] != '/' or full_path == self.root:
location = util.request_uri(environ, include_query=False) + '/'
if environ.get('QUERY_STRING'):
location += '?' + environ.get('QUERY_STRING')
headers = [('Location', location)]
return self.moved_permanently(environ, start_response, headers)
else:
full_path = self._full_path(path_info + self.index_file)
prezipped = ('gzip' in environ.get('HTTP_ACCEPT_ENCODING', [])
and path.exists(full_path + '.gz'))
if prezipped:
full_path += '.gz'
content_type = self._guess_type(full_path)
try:
etag, last_modified = self._conditions(full_path, environ)
headers = [('Date', rfc822.formatdate(time.time())),
('Last-Modified', last_modified),
('ETag', etag)]
if_modified = environ.get('HTTP_IF_MODIFIED_SINCE')
if if_modified and (rfc822.parsedate(if_modified)
>= rfc822.parsedate(last_modified)):
return self.not_modified(environ, start_response, headers)
if_none = environ.get('HTTP_IF_NONE_MATCH')
if if_none and (if_none == '*' or etag in if_none):
return self.not_modified(environ, start_response, headers)
file_like = self._file_like(full_path)
headers.append(('Content-Type', content_type))
if prezipped:
headers.extend([('Content-Encoding', 'gzip'),
('Vary', 'Accept-Encoding')])
self._add_headers(headers, path_info, content_type)
start_response("200 OK", headers)
if environ['REQUEST_METHOD'] == 'GET':
return self._body(full_path, environ, file_like)
else:
return [b'']
except (IOError, OSError) as e:
print(e)
return self.not_found(environ, start_response)
def _full_path(self, path_info):
"""Return the full path from which to read."""
return self.root + path_info
def _guess_type(self, full_path):
"""Guess the mime type using the mimetypes module."""
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return a tuple of etag, last_modified by mtime from stat."""
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _add_headers(self, headers, path, content_type):
DEFAULT = '__static_no_match__'
CONFIG_ITEMS = ['prefix', 'type', 'ext']
for config in getattr(self, 'headers', []):
if path.startswith(config.get('prefix', DEFAULT)) or \
content_type == config.get('type', DEFAULT) or \
path.endswith(config.get('ext', DEFAULT)):
for key, value in config.items():
if key not in CONFIG_ITEMS:
headers.append((key, value))
def _file_like(self, full_path):
"""Return the appropriate file object."""
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
|
rmohr/static3
|
static.py
|
Cling._conditions
|
python
|
def _conditions(self, full_path, environ):
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
|
Return a tuple of etag, last_modified by mtime from stat.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L212-L215
| null |
class Cling(object):
"""A stupidly simple way to serve static content via WSGI.
Serve the file of the same path as PATH_INFO in self.datadir.
Look up the Content-type in self.content_types by extension
or use 'text/plain' if the extension is not found.
Serve up the contents of the file or delegate to self.not_found.
"""
block_size = 16 * 4096
index_file = 'index.html'
not_found = StatusApp('404 Not Found')
not_modified = StatusApp('304 Not Modified', "")
moved_permanently = StatusApp('301 Moved Permanently')
method_not_allowed = StatusApp('405 Method Not Allowed')
def __init__(self, root, **kw):
"""Just set the root and any other attribs passes via **kw."""
self.root = root
self.encoding = sys.getdefaultencoding()
for k, v in kw.items():
setattr(self, k, v)
def __call__(self, environ, start_response):
"""Respond to a request when called in the usual WSGI way."""
if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
headers = [('Allow', 'GET, HEAD')]
return self.method_not_allowed(environ, start_response, headers)
path_info = environ.get('PATH_INFO', '')
full_path = self._full_path(path_info)
if not self._is_under_root(full_path):
return self.not_found(environ, start_response)
if path.isdir(full_path):
if full_path[-1] != '/' or full_path == self.root:
location = util.request_uri(environ, include_query=False) + '/'
if environ.get('QUERY_STRING'):
location += '?' + environ.get('QUERY_STRING')
headers = [('Location', location)]
return self.moved_permanently(environ, start_response, headers)
else:
full_path = self._full_path(path_info + self.index_file)
prezipped = ('gzip' in environ.get('HTTP_ACCEPT_ENCODING', [])
and path.exists(full_path + '.gz'))
if prezipped:
full_path += '.gz'
content_type = self._guess_type(full_path)
try:
etag, last_modified = self._conditions(full_path, environ)
headers = [('Date', rfc822.formatdate(time.time())),
('Last-Modified', last_modified),
('ETag', etag)]
if_modified = environ.get('HTTP_IF_MODIFIED_SINCE')
if if_modified and (rfc822.parsedate(if_modified)
>= rfc822.parsedate(last_modified)):
return self.not_modified(environ, start_response, headers)
if_none = environ.get('HTTP_IF_NONE_MATCH')
if if_none and (if_none == '*' or etag in if_none):
return self.not_modified(environ, start_response, headers)
file_like = self._file_like(full_path)
headers.append(('Content-Type', content_type))
if prezipped:
headers.extend([('Content-Encoding', 'gzip'),
('Vary', 'Accept-Encoding')])
self._add_headers(headers, path_info, content_type)
start_response("200 OK", headers)
if environ['REQUEST_METHOD'] == 'GET':
return self._body(full_path, environ, file_like)
else:
return [b'']
except (IOError, OSError) as e:
print(e)
return self.not_found(environ, start_response)
def _full_path(self, path_info):
"""Return the full path from which to read."""
return self.root + path_info
def _is_under_root(self, full_path):
"""Guard against arbitrary file retrieval."""
if (path.abspath(full_path) + path.sep)\
.startswith(path.abspath(self.root) + path.sep):
return True
else:
return False
def _guess_type(self, full_path):
"""Guess the mime type using the mimetypes module."""
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _add_headers(self, headers, path, content_type):
DEFAULT = '__static_no_match__'
CONFIG_ITEMS = ['prefix', 'type', 'ext']
for config in getattr(self, 'headers', []):
if path.startswith(config.get('prefix', DEFAULT)) or \
content_type == config.get('type', DEFAULT) or \
path.endswith(config.get('ext', DEFAULT)):
for key, value in config.items():
if key not in CONFIG_ITEMS:
headers.append((key, value))
def _file_like(self, full_path):
"""Return the appropriate file object."""
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
|
rmohr/static3
|
static.py
|
Cling._body
|
python
|
def _body(self, full_path, environ, file_like):
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
|
Return an iterator over the body of the response.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L233-L236
| null |
class Cling(object):
"""A stupidly simple way to serve static content via WSGI.
Serve the file of the same path as PATH_INFO in self.datadir.
Look up the Content-type in self.content_types by extension
or use 'text/plain' if the extension is not found.
Serve up the contents of the file or delegate to self.not_found.
"""
block_size = 16 * 4096
index_file = 'index.html'
not_found = StatusApp('404 Not Found')
not_modified = StatusApp('304 Not Modified', "")
moved_permanently = StatusApp('301 Moved Permanently')
method_not_allowed = StatusApp('405 Method Not Allowed')
def __init__(self, root, **kw):
"""Just set the root and any other attribs passes via **kw."""
self.root = root
self.encoding = sys.getdefaultencoding()
for k, v in kw.items():
setattr(self, k, v)
def __call__(self, environ, start_response):
"""Respond to a request when called in the usual WSGI way."""
if environ['REQUEST_METHOD'] not in ('GET', 'HEAD'):
headers = [('Allow', 'GET, HEAD')]
return self.method_not_allowed(environ, start_response, headers)
path_info = environ.get('PATH_INFO', '')
full_path = self._full_path(path_info)
if not self._is_under_root(full_path):
return self.not_found(environ, start_response)
if path.isdir(full_path):
if full_path[-1] != '/' or full_path == self.root:
location = util.request_uri(environ, include_query=False) + '/'
if environ.get('QUERY_STRING'):
location += '?' + environ.get('QUERY_STRING')
headers = [('Location', location)]
return self.moved_permanently(environ, start_response, headers)
else:
full_path = self._full_path(path_info + self.index_file)
prezipped = ('gzip' in environ.get('HTTP_ACCEPT_ENCODING', [])
and path.exists(full_path + '.gz'))
if prezipped:
full_path += '.gz'
content_type = self._guess_type(full_path)
try:
etag, last_modified = self._conditions(full_path, environ)
headers = [('Date', rfc822.formatdate(time.time())),
('Last-Modified', last_modified),
('ETag', etag)]
if_modified = environ.get('HTTP_IF_MODIFIED_SINCE')
if if_modified and (rfc822.parsedate(if_modified)
>= rfc822.parsedate(last_modified)):
return self.not_modified(environ, start_response, headers)
if_none = environ.get('HTTP_IF_NONE_MATCH')
if if_none and (if_none == '*' or etag in if_none):
return self.not_modified(environ, start_response, headers)
file_like = self._file_like(full_path)
headers.append(('Content-Type', content_type))
if prezipped:
headers.extend([('Content-Encoding', 'gzip'),
('Vary', 'Accept-Encoding')])
self._add_headers(headers, path_info, content_type)
start_response("200 OK", headers)
if environ['REQUEST_METHOD'] == 'GET':
return self._body(full_path, environ, file_like)
else:
return [b'']
except (IOError, OSError) as e:
print(e)
return self.not_found(environ, start_response)
def _full_path(self, path_info):
"""Return the full path from which to read."""
return self.root + path_info
def _is_under_root(self, full_path):
"""Guard against arbitrary file retrieval."""
if (path.abspath(full_path) + path.sep)\
.startswith(path.abspath(self.root) + path.sep):
return True
else:
return False
def _guess_type(self, full_path):
"""Guess the mime type using the mimetypes module."""
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return a tuple of etag, last_modified by mtime from stat."""
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _add_headers(self, headers, path, content_type):
DEFAULT = '__static_no_match__'
CONFIG_ITEMS = ['prefix', 'type', 'ext']
for config in getattr(self, 'headers', []):
if path.startswith(config.get('prefix', DEFAULT)) or \
content_type == config.get('type', DEFAULT) or \
path.endswith(config.get('ext', DEFAULT)):
for key, value in config.items():
if key not in CONFIG_ITEMS:
headers.append((key, value))
def _file_like(self, full_path):
"""Return the appropriate file object."""
return open(full_path, 'rb')
|
rmohr/static3
|
static.py
|
Shock._match_magic
|
python
|
def _match_magic(self, full_path):
for magic in self.magics:
if magic.matches(full_path):
return magic
|
Return the first magic that matches this path or None.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L296-L300
| null |
class Shock(Cling):
"""A stupidly simple way to serve up mixed content.
Serves static content just like Cling (it's superclass)
except that it process content with the first matching
magic from self.magics if any apply.
See Cling and classes with "Magic" in their names in this module.
If you are using Shock with the StringMagic class for instance:
shock = Shock('/data', magics=[StringMagic(food='cheese')])
Let's say you have a file called /data/foo.txt.stp containing one line:
"I love to eat $food!"
When you do a GET on /foo.txt you will see this in your browser:
"I love to eat cheese!"
This is really nice if you have a color variable in your css files or
something trivial like that. It seems silly to create or change a
handful of objects for a couple of dynamic bits of text.
"""
magics = ()
def _full_path(self, path_info):
"""Return the full path from which to read."""
full_path = self.root + path_info
if path.exists(full_path):
return full_path
else:
for magic in self.magics:
if path.exists(magic.new_path(full_path)):
return magic.new_path(full_path)
else:
return full_path
def _guess_type(self, full_path):
"""Guess the mime type magically or using the mimetypes module."""
magic = self._match_magic(full_path)
if magic is not None:
return (mimetypes.guess_type(magic.old_path(full_path))[0]
or 'text/plain')
else:
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return Etag and Last-Modified values defaults to now for both."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.conditions(full_path, environ)
else:
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _file_like(self, full_path):
"""Return the appropriate file object."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.file_like(full_path, self.encoding)
else:
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
magic = self._match_magic(full_path)
if magic is not None:
return [_encode(s, self.encoding) for s in magic.body(environ,
file_like)]
else:
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
|
rmohr/static3
|
static.py
|
Shock._full_path
|
python
|
def _full_path(self, path_info):
full_path = self.root + path_info
if path.exists(full_path):
return full_path
else:
for magic in self.magics:
if path.exists(magic.new_path(full_path)):
return magic.new_path(full_path)
else:
return full_path
|
Return the full path from which to read.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L302-L312
| null |
class Shock(Cling):
"""A stupidly simple way to serve up mixed content.
Serves static content just like Cling (it's superclass)
except that it process content with the first matching
magic from self.magics if any apply.
See Cling and classes with "Magic" in their names in this module.
If you are using Shock with the StringMagic class for instance:
shock = Shock('/data', magics=[StringMagic(food='cheese')])
Let's say you have a file called /data/foo.txt.stp containing one line:
"I love to eat $food!"
When you do a GET on /foo.txt you will see this in your browser:
"I love to eat cheese!"
This is really nice if you have a color variable in your css files or
something trivial like that. It seems silly to create or change a
handful of objects for a couple of dynamic bits of text.
"""
magics = ()
def _match_magic(self, full_path):
"""Return the first magic that matches this path or None."""
for magic in self.magics:
if magic.matches(full_path):
return magic
def _guess_type(self, full_path):
"""Guess the mime type magically or using the mimetypes module."""
magic = self._match_magic(full_path)
if magic is not None:
return (mimetypes.guess_type(magic.old_path(full_path))[0]
or 'text/plain')
else:
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return Etag and Last-Modified values defaults to now for both."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.conditions(full_path, environ)
else:
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _file_like(self, full_path):
"""Return the appropriate file object."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.file_like(full_path, self.encoding)
else:
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
magic = self._match_magic(full_path)
if magic is not None:
return [_encode(s, self.encoding) for s in magic.body(environ,
file_like)]
else:
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
|
rmohr/static3
|
static.py
|
Shock._guess_type
|
python
|
def _guess_type(self, full_path):
magic = self._match_magic(full_path)
if magic is not None:
return (mimetypes.guess_type(magic.old_path(full_path))[0]
or 'text/plain')
else:
return mimetypes.guess_type(full_path)[0] or 'text/plain'
|
Guess the mime type magically or using the mimetypes module.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L314-L321
| null |
class Shock(Cling):
"""A stupidly simple way to serve up mixed content.
Serves static content just like Cling (it's superclass)
except that it process content with the first matching
magic from self.magics if any apply.
See Cling and classes with "Magic" in their names in this module.
If you are using Shock with the StringMagic class for instance:
shock = Shock('/data', magics=[StringMagic(food='cheese')])
Let's say you have a file called /data/foo.txt.stp containing one line:
"I love to eat $food!"
When you do a GET on /foo.txt you will see this in your browser:
"I love to eat cheese!"
This is really nice if you have a color variable in your css files or
something trivial like that. It seems silly to create or change a
handful of objects for a couple of dynamic bits of text.
"""
magics = ()
def _match_magic(self, full_path):
"""Return the first magic that matches this path or None."""
for magic in self.magics:
if magic.matches(full_path):
return magic
def _full_path(self, path_info):
"""Return the full path from which to read."""
full_path = self.root + path_info
if path.exists(full_path):
return full_path
else:
for magic in self.magics:
if path.exists(magic.new_path(full_path)):
return magic.new_path(full_path)
else:
return full_path
def _conditions(self, full_path, environ):
"""Return Etag and Last-Modified values defaults to now for both."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.conditions(full_path, environ)
else:
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _file_like(self, full_path):
"""Return the appropriate file object."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.file_like(full_path, self.encoding)
else:
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
magic = self._match_magic(full_path)
if magic is not None:
return [_encode(s, self.encoding) for s in magic.body(environ,
file_like)]
else:
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
|
rmohr/static3
|
static.py
|
Shock._conditions
|
python
|
def _conditions(self, full_path, environ):
magic = self._match_magic(full_path)
if magic is not None:
return magic.conditions(full_path, environ)
else:
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
|
Return Etag and Last-Modified values defaults to now for both.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L323-L330
| null |
class Shock(Cling):
"""A stupidly simple way to serve up mixed content.
Serves static content just like Cling (it's superclass)
except that it process content with the first matching
magic from self.magics if any apply.
See Cling and classes with "Magic" in their names in this module.
If you are using Shock with the StringMagic class for instance:
shock = Shock('/data', magics=[StringMagic(food='cheese')])
Let's say you have a file called /data/foo.txt.stp containing one line:
"I love to eat $food!"
When you do a GET on /foo.txt you will see this in your browser:
"I love to eat cheese!"
This is really nice if you have a color variable in your css files or
something trivial like that. It seems silly to create or change a
handful of objects for a couple of dynamic bits of text.
"""
magics = ()
def _match_magic(self, full_path):
"""Return the first magic that matches this path or None."""
for magic in self.magics:
if magic.matches(full_path):
return magic
def _full_path(self, path_info):
"""Return the full path from which to read."""
full_path = self.root + path_info
if path.exists(full_path):
return full_path
else:
for magic in self.magics:
if path.exists(magic.new_path(full_path)):
return magic.new_path(full_path)
else:
return full_path
def _guess_type(self, full_path):
"""Guess the mime type magically or using the mimetypes module."""
magic = self._match_magic(full_path)
if magic is not None:
return (mimetypes.guess_type(magic.old_path(full_path))[0]
or 'text/plain')
else:
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _file_like(self, full_path):
"""Return the appropriate file object."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.file_like(full_path, self.encoding)
else:
return open(full_path, 'rb')
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
magic = self._match_magic(full_path)
if magic is not None:
return [_encode(s, self.encoding) for s in magic.body(environ,
file_like)]
else:
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
|
rmohr/static3
|
static.py
|
Shock._file_like
|
python
|
def _file_like(self, full_path):
magic = self._match_magic(full_path)
if magic is not None:
return magic.file_like(full_path, self.encoding)
else:
return open(full_path, 'rb')
|
Return the appropriate file object.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L332-L338
| null |
class Shock(Cling):
"""A stupidly simple way to serve up mixed content.
Serves static content just like Cling (it's superclass)
except that it process content with the first matching
magic from self.magics if any apply.
See Cling and classes with "Magic" in their names in this module.
If you are using Shock with the StringMagic class for instance:
shock = Shock('/data', magics=[StringMagic(food='cheese')])
Let's say you have a file called /data/foo.txt.stp containing one line:
"I love to eat $food!"
When you do a GET on /foo.txt you will see this in your browser:
"I love to eat cheese!"
This is really nice if you have a color variable in your css files or
something trivial like that. It seems silly to create or change a
handful of objects for a couple of dynamic bits of text.
"""
magics = ()
def _match_magic(self, full_path):
"""Return the first magic that matches this path or None."""
for magic in self.magics:
if magic.matches(full_path):
return magic
def _full_path(self, path_info):
"""Return the full path from which to read."""
full_path = self.root + path_info
if path.exists(full_path):
return full_path
else:
for magic in self.magics:
if path.exists(magic.new_path(full_path)):
return magic.new_path(full_path)
else:
return full_path
def _guess_type(self, full_path):
"""Guess the mime type magically or using the mimetypes module."""
magic = self._match_magic(full_path)
if magic is not None:
return (mimetypes.guess_type(magic.old_path(full_path))[0]
or 'text/plain')
else:
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return Etag and Last-Modified values defaults to now for both."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.conditions(full_path, environ)
else:
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _body(self, full_path, environ, file_like):
"""Return an iterator over the body of the response."""
magic = self._match_magic(full_path)
if magic is not None:
return [_encode(s, self.encoding) for s in magic.body(environ,
file_like)]
else:
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
|
rmohr/static3
|
static.py
|
Shock._body
|
python
|
def _body(self, full_path, environ, file_like):
magic = self._match_magic(full_path)
if magic is not None:
return [_encode(s, self.encoding) for s in magic.body(environ,
file_like)]
else:
way_to_send = environ.get('wsgi.file_wrapper', iter_and_close)
return way_to_send(file_like, self.block_size)
|
Return an iterator over the body of the response.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L340-L348
| null |
class Shock(Cling):
"""A stupidly simple way to serve up mixed content.
Serves static content just like Cling (it's superclass)
except that it process content with the first matching
magic from self.magics if any apply.
See Cling and classes with "Magic" in their names in this module.
If you are using Shock with the StringMagic class for instance:
shock = Shock('/data', magics=[StringMagic(food='cheese')])
Let's say you have a file called /data/foo.txt.stp containing one line:
"I love to eat $food!"
When you do a GET on /foo.txt you will see this in your browser:
"I love to eat cheese!"
This is really nice if you have a color variable in your css files or
something trivial like that. It seems silly to create or change a
handful of objects for a couple of dynamic bits of text.
"""
magics = ()
def _match_magic(self, full_path):
"""Return the first magic that matches this path or None."""
for magic in self.magics:
if magic.matches(full_path):
return magic
def _full_path(self, path_info):
"""Return the full path from which to read."""
full_path = self.root + path_info
if path.exists(full_path):
return full_path
else:
for magic in self.magics:
if path.exists(magic.new_path(full_path)):
return magic.new_path(full_path)
else:
return full_path
def _guess_type(self, full_path):
"""Guess the mime type magically or using the mimetypes module."""
magic = self._match_magic(full_path)
if magic is not None:
return (mimetypes.guess_type(magic.old_path(full_path))[0]
or 'text/plain')
else:
return mimetypes.guess_type(full_path)[0] or 'text/plain'
def _conditions(self, full_path, environ):
"""Return Etag and Last-Modified values defaults to now for both."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.conditions(full_path, environ)
else:
mtime = stat(full_path).st_mtime
return str(mtime), rfc822.formatdate(mtime)
def _file_like(self, full_path):
"""Return the appropriate file object."""
magic = self._match_magic(full_path)
if magic is not None:
return magic.file_like(full_path, self.encoding)
else:
return open(full_path, 'rb')
|
rmohr/static3
|
static.py
|
BaseMagic.exists
|
python
|
def exists(self, full_path):
if path.exists(self.new_path(full_path)):
return self.new_path(full_path)
|
Check that self.new_path(full_path) exists.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L365-L368
|
[
"def new_path(self, full_path):\n \"\"\"Add the self.extension to the path.\"\"\"\n return full_path + self.extension\n"
] |
class BaseMagic(object):
"""Base class for magic file handling.
Really a do nothing if you were to use this directly.
In a strait forward case you would just override .extension and body().
(See StringMagic in this module for a simple example of subclassing.)
In a more complex case you may need to override many or all methods.
"""
extension = ''
def new_path(self, full_path):
"""Add the self.extension to the path."""
return full_path + self.extension
def old_path(self, full_path):
"""Remove self.extension from path or raise MagicError."""
if self.matches(full_path):
return full_path[:-len(self.extension)]
else:
raise MagicError("Path does not match this magic.")
def matches(self, full_path):
"""Check that path ends with self.extension."""
if full_path.endswith(self.extension):
return full_path
def conditions(self, full_path, environ):
"""Return Etag and Last-Modified values (based on mtime)."""
mtime = int(time.time())
return str(mtime), rfc822.formatdate(mtime)
def file_like(self, full_path, encoding):
"""Return a file object for path."""
return _open(full_path, encoding)
def body(self, environ, file_like):
"""Return an iterator over the body of the response."""
return [file_like.read()]
|
rmohr/static3
|
static.py
|
BaseMagic.old_path
|
python
|
def old_path(self, full_path):
if self.matches(full_path):
return full_path[:-len(self.extension)]
else:
raise MagicError("Path does not match this magic.")
|
Remove self.extension from path or raise MagicError.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L374-L379
|
[
"def matches(self, full_path):\n \"\"\"Check that path ends with self.extension.\"\"\"\n if full_path.endswith(self.extension):\n return full_path\n"
] |
class BaseMagic(object):
"""Base class for magic file handling.
Really a do nothing if you were to use this directly.
In a strait forward case you would just override .extension and body().
(See StringMagic in this module for a simple example of subclassing.)
In a more complex case you may need to override many or all methods.
"""
extension = ''
def exists(self, full_path):
"""Check that self.new_path(full_path) exists."""
if path.exists(self.new_path(full_path)):
return self.new_path(full_path)
def new_path(self, full_path):
"""Add the self.extension to the path."""
return full_path + self.extension
def matches(self, full_path):
"""Check that path ends with self.extension."""
if full_path.endswith(self.extension):
return full_path
def conditions(self, full_path, environ):
"""Return Etag and Last-Modified values (based on mtime)."""
mtime = int(time.time())
return str(mtime), rfc822.formatdate(mtime)
def file_like(self, full_path, encoding):
"""Return a file object for path."""
return _open(full_path, encoding)
def body(self, environ, file_like):
"""Return an iterator over the body of the response."""
return [file_like.read()]
|
rmohr/static3
|
static.py
|
BaseMagic.conditions
|
python
|
def conditions(self, full_path, environ):
mtime = int(time.time())
return str(mtime), rfc822.formatdate(mtime)
|
Return Etag and Last-Modified values (based on mtime).
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L386-L389
| null |
class BaseMagic(object):
"""Base class for magic file handling.
Really a do nothing if you were to use this directly.
In a strait forward case you would just override .extension and body().
(See StringMagic in this module for a simple example of subclassing.)
In a more complex case you may need to override many or all methods.
"""
extension = ''
def exists(self, full_path):
"""Check that self.new_path(full_path) exists."""
if path.exists(self.new_path(full_path)):
return self.new_path(full_path)
def new_path(self, full_path):
"""Add the self.extension to the path."""
return full_path + self.extension
def old_path(self, full_path):
"""Remove self.extension from path or raise MagicError."""
if self.matches(full_path):
return full_path[:-len(self.extension)]
else:
raise MagicError("Path does not match this magic.")
def matches(self, full_path):
"""Check that path ends with self.extension."""
if full_path.endswith(self.extension):
return full_path
def file_like(self, full_path, encoding):
"""Return a file object for path."""
return _open(full_path, encoding)
def body(self, environ, file_like):
"""Return an iterator over the body of the response."""
return [file_like.read()]
|
rmohr/static3
|
static.py
|
StringMagic.body
|
python
|
def body(self, environ, file_like):
variables = environ.copy()
variables.update(self.variables)
template = string.Template(file_like.read())
if self.safe is True:
return [template.safe_substitute(variables)]
else:
return [template.substitute(variables)]
|
Pass environ and self.variables in to template.
self.variables overrides environ so that suprises in environ don't
cause unexpected output if you are passing a value in explicitly.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L414-L426
| null |
class StringMagic(BaseMagic):
"""Magic to replace variables in file contents using string.Template.
Using this requires Python2.4.
"""
extension = '.stp'
safe = False
def __init__(self, **variables):
"""Keyword arguments populate self.variables."""
self.variables = variables
|
rmohr/static3
|
static.py
|
KidMagic.body
|
python
|
def body(self, environ, full_path):
template = kid.Template(file=full_path,
environ=environ,
**self.variables)
return [template.serialize()]
|
Pass environ and **self.variables into the template.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L438-L443
| null |
class KidMagic(StringMagic):
"""Like StringMagic only using the Kid templating language.
Using this requires Kid: http://kid.lesscode.org/
"""
extension = '.kid'
|
rmohr/static3
|
static.py
|
GenshiMagic.body
|
python
|
def body(self, environ, full_path):
template = MarkupTemplate(full_path.read())
variables = self.variables.copy()
variables["environ"] = environ
return [template.generate(**variables)
.render('html', doctype='html')]
|
Pass environ and **self.variables into the template.
|
train
|
https://github.com/rmohr/static3/blob/e5f88c5e91789bd4db7fde0cf59e4a15c3326f11/static.py#L455-L462
| null |
class GenshiMagic(StringMagic):
"""Like StringMagic only using the Genshi templating language.
Using this requires Genshi
"""
extension = '.genshi'
|
ifduyue/urlfetch
|
urlfetch.py
|
fetch
|
python
|
def fetch(*args, **kwargs):
data = kwargs.get('data', None)
files = kwargs.get('files', {})
if data and isinstance(data, (basestring, dict)) or files:
return post(*args, **kwargs)
return get(*args, **kwargs)
|
fetch an URL.
:func:`~urlfetch.fetch` is a wrapper of :func:`~urlfetch.request`.
It calls :func:`~urlfetch.get` by default. If one of parameter ``data``
or parameter ``files`` is supplied, :func:`~urlfetch.post` is called.
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L528-L540
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
urlfetch
~~~~~~~~~~
An easy to use HTTP client based on httplib.
:copyright: (c) 2011-2019 by Yue Du.
:license: BSD 2-clause License, see LICENSE for more details.
"""
__version__ = '1.1.2'
__author__ = 'Yue Du <ifduyue@gmail.com>'
__url__ = 'https://github.com/ifduyue/urlfetch'
__license__ = 'BSD 2-Clause License'
import os, sys, base64, codecs, uuid, stat, time, socket
from os.path import basename, dirname, abspath, join as pathjoin
from functools import partial
from io import BytesIO
import re
try:
import simplejson as json
except ImportError:
import json
py3k = sys.version_info >= (3, 0)
support_source_address = (sys.version_info >= (2, 7) and not py3k
or sys.version_info >= (3, 2))
if py3k:
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urlencode
import urllib.parse as urlparse
import http.cookies as Cookie
basestring = (str, bytes)
unicode = str
b = lambda s: s.encode('latin-1')
u = lambda s: s
else:
from httplib import HTTPConnection, HTTPSConnection
from urllib import urlencode
import urlparse
import Cookie
basestring = basestring
unicode = unicode
b = lambda s: s
u = lambda s: unicode(s, 'unicode_escape')
__all__ = ('request', 'fetch', 'Session',
'get', 'head', 'put', 'post', 'delete',
'options', 'trace', 'patch',
'UrlfetchException', 'ContentLimitExceeded', 'URLError',
'ContentDecodingError', 'TooManyRedirects')
class UrlfetchException(IOError):
"Base exception. All exceptions and errors will subclass from this."
class ContentLimitExceeded(UrlfetchException):
"Content length is beyond the limit."
class URLError(UrlfetchException, ValueError):
"Error parsing or handling the URL."
class ContentDecodingError(UrlfetchException):
"Failed to decode the content."
class TooManyRedirects(UrlfetchException):
"""Too many redirects."""
class Timeout(UrlfetchException):
"""Request timed out."""
class cached_property(object):
"""Cached property.
A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
"""
def __init__(self, fget=None, fset=None, fdel=None, doc=None):
self.__get = fget
self.__set = fset
self.__del = fdel
self.__doc__ = doc or fget.__doc__
self.__name__ = fget.__name__
self.__module__ = fget.__module__
def __get__(self, instance, owner):
if instance is None:
# attribute is accessed through the owner class
return self
try:
return instance.__dict__[self.__name__]
except KeyError:
value = instance.__dict__[self.__name__] = self.__get(instance)
return value
def __set__(self, instance, value):
if instance is None:
return self
if self.__set is not None:
value = self.__set(instance, value)
instance.__dict__[self.__name__] = value
def __delete__(self, instance):
if instance is None:
return self
try:
value = instance.__dict__.pop(self.__name__)
except KeyError:
pass
else:
if self.__del is not None:
self.__del(instance, value)
def setter(self, fset):
return self.__class__(self.__get, fset, self.__del)
def deleter(self, fdel):
return self.__class__(self.__get, self.__set, fdel)
##############################################################################
# Core Methods and Classes ####################################################
##############################################################################
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
def body(self):
"""Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
"""
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
def json(self):
"""Load response body as json.
:raises: :class:`ContentDecodingError`
"""
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
@cached_property
def headers(self):
"""Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
"""
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
@cached_property
def cookies(self):
"""Cookies in dict"""
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
@cached_property
def cookiestring(self):
"""Cookie string"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def links(self):
"""Links parsed from HTTP Link header"""
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
def close(self):
"""Close the connection."""
self._r.close()
def __del__(self):
self.close()
class Session(object):
"""A session object.
:class:`urlfetch.Session` can hold common headers and cookies.
Every request issued by a :class:`urlfetch.Session` object will bring u
these headers and cookies.
:class:`urlfetch.Session` plays a role in handling cookies, just like a
cookiejar.
:arg dict headers: Init headers.
:arg dict cookies: Init cookies.
:arg tuple auth: (username, password) for basic authentication.
"""
def __init__(self, headers={}, cookies={}, auth=None):
"""Init a :class:`~urlfetch.Session` object"""
#: headers
self.headers = headers.copy()
#: cookies
self.cookies = cookies.copy()
if auth and isinstance(auth, (list, tuple)):
auth = '%s:%s' % tuple(auth)
auth = base64.b64encode(auth.encode('utf-8'))
self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')
def putheader(self, header, value):
"""Add an header to default headers."""
self.headers[header] = value
def popheader(self, header):
"""Remove an header from default headers."""
return self.headers.pop(header)
def putcookie(self, key, value=""):
"""Add an cookie to default cookies."""
self.cookies[key] = value
def popcookie(self, key):
"""Remove an cookie from default cookies."""
return self.cookies.pop(key)
@property
def cookiestring(self):
"""Cookie string.
It's assignalbe, and will change :attr:`~.Session.cookies`
correspondingly.
>>> s = Session()
>>> s.cookiestring = 'foo=bar; 1=2'
>>> s.cookies
{'1': '2', 'foo': 'bar'}
"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cookiestring.setter
def cookiestring(self, value):
""""Cookie string setter"""
c = Cookie.SimpleCookie(value)
sc = [(i.key, i.value) for i in c.values()]
self.cookies = dict(sc)
def snapshot(self):
session = {
'headers': self.headers.copy(),
'cookies': self.cookies.copy()
}
return session
def request(self, *args, **kwargs):
"""Issue a request."""
headers = self.headers.copy()
if self.cookiestring:
headers['Cookie'] = self.cookiestring
headers.update(kwargs.get('headers', {}))
kwargs['headers'] = headers
r = request(*args, **kwargs)
self.cookies.update(r.cookies)
return r
def fetch(self, *args, **kwargs):
"""Fetch an URL"""
data = kwargs.get('data', None)
files = kwargs.get('files', {})
if data and isinstance(data, (basestring, dict)) or files:
return self.post(*args, **kwargs)
return self.get(*args, **kwargs)
def get(self, *args, **kwargs):
"""Issue a get request."""
kwargs['method'] = 'GET'
return self.request(*args, **kwargs)
def post(self, *args, **kwargs):
"""Issue a post request."""
kwargs['method'] = 'POST'
return self.request(*args, **kwargs)
def put(self, *args, **kwargs):
"""Issue a put request."""
kwargs['method'] = 'PUT'
return self.request(*args, **kwargs)
def delete(self, *args, **kwargs):
"""Issue a delete request."""
kwargs['method'] = 'DELETE'
return self.request(*args, **kwargs)
def head(self, *args, **kwargs):
"""Issue a head request."""
kwargs['method'] = 'HEAD'
return self.request(*args, **kwargs)
def options(self, *args, **kwargs):
"""Issue a options request."""
kwargs['method'] = 'OPTIONS'
return self.request(*args, **kwargs)
def trace(self, *args, **kwargs):
"""Issue a trace request."""
kwargs['method'] = 'TRACE'
return self.request(*args, **kwargs)
def patch(self, *args, **kwargs):
"""Issue a patch request."""
kwargs['method'] = 'PATCH'
return self.request(*args, **kwargs)
def match_no_proxy(host, no_proxy):
ip_regex = r"(\d{1,3}).(\d{1,3}).(\d{1,3}).(\d{1,3})"
no_proxy_ip_regex = r"(\d{1,3}).(\d{1,3}).(\d{1,3}).(\d{1,3})(?=/(\d+))?"
ip_match = re.match(ip_regex, host)
no_proxy_ip_match = re.match(no_proxy_ip_regex, no_proxy)
if no_proxy_ip_match and ip_match:
host_bits = "".join("{:08b}".format(int(section)) for section in ip_match.group(1, 2, 3, 4))
no_proxy_bits = "".join("{:08b}".format(int(section)) for section in no_proxy_ip_match.group(1, 2, 3, 4))
if no_proxy_ip_match.group(5) is not None:
bit_match_count = int(no_proxy_ip_match.group(5))
return host_bits[:bit_match_count] == no_proxy_bits[:bit_match_count]
else:
return host_bits == no_proxy_bits
else:
return host.endswith(no_proxy)
def request(url, method="GET", params=None, data=None, headers={},
timeout=None, files={}, randua=False, auth=None, length_limit=None,
proxies=None, trust_env=True, max_redirects=0,
source_address=None, **kwargs):
"""request an URL
:arg string url: URL to be fetched.
:arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,
``PATCH``. ``GET`` is the default.
:arg dict/string params: (optional) Dict or string to attach to url as
querystring.
:arg dict headers: (optional) HTTP request headers.
:arg float timeout: (optional) Timeout in seconds
:arg files: (optional) Files to be sended
:arg randua: (optional) If ``True`` or ``path string``, use a random
user-agent in headers, instead of
``'urlfetch/' + __version__``
:arg tuple auth: (optional) (username, password) for basic authentication
:arg int length_limit: (optional) If ``None``, no limits on content length,
if the limit reached raised exception 'Content length
is more than ...'
:arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',
'https': '127.0.0.1:563'}
:arg bool trust_env: (optional) If ``True``, urlfetch will get infomations
from env, such as HTTP_PROXY, HTTPS_PROXY
:arg int max_redirects: (integer, optional) Max redirects allowed within a
request. Default is 0, which means redirects are
not allowed.
:arg tuple source_address: (optional) A tuple of (host, port) to
specify the source_address to bind to. This
argument is ignored if you're using Python prior
to 2.7/3.2.
:returns: A :class:`~urlfetch.Response` object
:raises: :class:`URLError`, :class:`UrlfetchException`,
:class:`TooManyRedirects`,
"""
def make_connection(conn_type, host, port, timeout, source_address):
"""Return HTTP or HTTPS connection."""
if support_source_address:
kwargs = {'timeout': timeout, 'source_address': source_address}
else:
kwargs = {'timeout': timeout}
if source_address is not None:
raise UrlfetchException('source_address requires'
'Python 2.7/3.2 or newer versions')
if conn_type == 'http':
conn = HTTPConnection(host, port, **kwargs)
elif conn_type == 'https':
conn = HTTPSConnection(host, port, **kwargs)
else:
raise URLError('Unknown Connection Type: %s' % conn_type)
return conn
via_proxy = False
method = method.upper()
if method not in ALLOWED_METHODS:
raise UrlfetchException("Method should be one of " +
", ".join(ALLOWED_METHODS))
if params:
if isinstance(params, dict):
url = url_concat(url, params)
elif isinstance(params, basestring):
if url[-1] not in ('?', '&'):
url += '&' if ('?' in url) else '?'
url += params
parsed_url = parse_url(url)
reqheaders = {
'Accept': '*/*',
'Accept-Encoding': 'gzip, deflate, compress, identity, *',
'User-Agent': random_useragent(randua),
'Host': parsed_url['http_host']
}
# Proxy support
scheme = parsed_url['scheme']
if proxies is None and trust_env:
proxies = PROXIES
ignore_hosts = PROXY_IGNORE_HOSTS
if trust_env:
no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')
if no_proxy:
ignore_hosts = no_proxy.split(",")
proxy = proxies.get(scheme)
if proxy and not any(match_no_proxy(parsed_url['host'], host) for host in ignore_hosts):
via_proxy = True
if '://' not in proxy:
proxy = '%s://%s' % (scheme, proxy)
parsed_proxy = parse_url(proxy)
# Proxy-Authorization
if parsed_proxy['username'] and parsed_proxy['password']:
proxyauth = '%s:%s' % (parsed_proxy['username'],
parsed_proxy['password'])
proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
reqheaders['Proxy-Authorization'] = 'Basic ' + \
proxyauth.decode('utf-8')
conn = make_connection(scheme, parsed_proxy['host'],
parsed_proxy['port'], timeout, source_address)
else:
conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],
timeout, source_address)
if not auth and parsed_url['username'] and parsed_url['password']:
auth = (parsed_url['username'], parsed_url['password'])
if auth:
if isinstance(auth, (list, tuple)):
auth = '%s:%s' % tuple(auth)
auth = base64.b64encode(auth.encode('utf-8'))
reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')
if files:
content_type, data = encode_multipart(data, files)
reqheaders['Content-Type'] = content_type
elif isinstance(data, dict):
data = urlencode(data, 1)
if isinstance(data, basestring) and not files:
# httplib will set 'Content-Length', also you can set it by yourself
reqheaders["Content-Type"] = "application/x-www-form-urlencoded"
# what if the method is GET, HEAD or DELETE
# just do not make so much decisions for users
reqheaders.update(headers)
start_time = time.time()
try:
request_url = url if via_proxy else parsed_url['uri']
conn.request(method, request_url, data, reqheaders)
resp = conn.getresponse()
except socket.timeout as e:
raise Timeout(e)
except Exception as e:
raise UrlfetchException(e)
end_time = time.time()
total_time = end_time - start_time
history = []
response = Response.from_httplib(resp, reqheaders=reqheaders,
length_limit=length_limit,
history=history[:], url=url,
total_time=total_time,
start_time=start_time)
while (response.status in (301, 302, 303, 307) and
'location' in response.headers and max_redirects):
response.body, response.close(), history.append(response)
if len(history) > max_redirects:
raise TooManyRedirects('max_redirects exceeded')
method = method if response.status == 307 else 'GET'
location = response.headers['location']
if location[:2] == '//':
url = parsed_url['scheme'] + ':' + location
else:
url = urlparse.urljoin(url, location)
parsed_url = parse_url(url)
reqheaders['Host'] = parsed_url['http_host']
reqheaders['Referer'] = response.url
# Proxy
scheme = parsed_url['scheme']
proxy = proxies.get(scheme)
if proxy and parsed_url['host'] not in PROXY_IGNORE_HOSTS:
via_proxy = True
if '://' not in proxy:
proxy = '%s://%s' % (parsed_url['scheme'], proxy)
parsed_proxy = parse_url(proxy)
# Proxy-Authorization
if parsed_proxy['username'] and parsed_proxy['password']:
proxyauth = '%s:%s' % (parsed_proxy['username'],
parsed_proxy['username'])
proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
reqheaders['Proxy-Authorization'] = 'Basic ' + \
proxyauth.decode('utf-8')
conn = make_connection(scheme, parsed_proxy['host'],
parsed_proxy['port'], timeout,
source_address)
else:
via_proxy = False
reqheaders.pop('Proxy-Authorization', None)
conn = make_connection(scheme, parsed_url['host'],
parsed_url['port'], timeout, source_address)
try:
request_url = url if via_proxy else parsed_url['uri']
conn.request(method, request_url, data, reqheaders)
resp = conn.getresponse()
except socket.timeout as e:
raise Timeout(e)
except Exception as e:
raise UrlfetchException(e)
response = Response.from_httplib(resp, reqheaders=reqheaders,
length_limit=length_limit,
history=history[:], url=url,
total_time=total_time,
start_time=start_time)
return response
##############################################################################
# Shortcuts and Helpers #######################################################
##############################################################################
def _partial_method(method):
func = partial(request, method=method)
func.__doc__ = 'Issue a %s request' % method.lower()
func.__name__ = method.lower()
func.__module__ = request.__module__
return func
get = _partial_method("GET")
post = _partial_method("POST")
put = _partial_method("PUT")
delete = _partial_method("DELETE")
head = _partial_method("HEAD")
options = _partial_method("OPTIONS")
trace = _partial_method("TRACE")
patch = _partial_method("PATCH")
del _partial_method
class ObjectDict(dict):
"""Makes a dictionary behave like an object."""
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
self[name] = value
def parse_url(url):
"""Return a dictionary of parsed url
Including scheme, netloc, path, params, query, fragment, uri, username,
password, host, port and http_host
"""
try:
url = unicode(url)
except UnicodeDecodeError:
pass
if py3k:
make_utf8 = lambda x: x
else:
make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x
if '://' in url:
scheme, url = url.split('://', 1)
else:
scheme = 'http'
url = 'http://' + url
parsed = urlparse.urlsplit(url)
r = ObjectDict()
r['scheme'] = make_utf8(scheme)
r['netloc'] = make_utf8(parsed.netloc)
r['path'] = make_utf8(parsed.path)
r['query'] = make_utf8(parsed.query)
r['fragment'] = make_utf8(parsed.fragment)
r['uri'] = make_utf8(parsed.path)
if parsed.query:
r['uri'] += '?' + make_utf8(parsed.query)
r['username'] = make_utf8(parsed.username)
r['password'] = make_utf8(parsed.password)
host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
r['host'] = r['hostname'] = host
try:
r['port'] = parsed.port
except ValueError:
r['port'] = None
if r['port']:
r['http_host'] = '%s:%d' % (r['host'], r['port'])
else:
r['http_host'] = r['host']
return r
def get_proxies_from_environ():
"""Get proxies from os.environ."""
proxies = {}
http_proxy = os.getenv('http_proxy') or os.getenv('HTTP_PROXY')
https_proxy = os.getenv('https_proxy') or os.getenv('HTTPS_PROXY')
if http_proxy:
proxies['http'] = http_proxy
if https_proxy:
proxies['https'] = https_proxy
return proxies
def mb_code(s, coding=None, errors='replace'):
"""encoding/decoding helper."""
if isinstance(s, unicode):
return s if coding is None else s.encode(coding, errors=errors)
for c in ('utf-8', 'gb2312', 'gbk', 'gb18030', 'big5'):
try:
s = s.decode(c)
return s if coding is None else s.encode(coding, errors=errors)
except:
pass
return unicode(s, errors=errors)
def random_useragent(filename=True):
"""Returns a User-Agent string randomly from file.
:arg string filename: (Optional) Path to the file from which a random
useragent is generated. By default it's ``True``, a file shipped
with this module will be used.
:returns: An user-agent string.
"""
import random
default_ua = 'urlfetch/%s' % __version__
if isinstance(filename, basestring):
filenames = [filename]
else:
filenames = []
if filename and UAFILE:
filenames.append(UAFILE)
for filename in filenames:
try:
st = os.stat(filename)
if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
break
except:
pass
else:
return default_ua
with open(filename, 'rb') as f:
filesize = st.st_size
pos = 0
r = random.Random()
# try getting a valid line for no more than 3 times
for i in range(3):
pos += r.randint(0, filesize)
pos %= filesize
f.seek(pos)
# in case we are in middle of a line
f.readline()
line = f.readline()
if not line:
if f.tell() == filesize:
# end of file
f.seek(0)
line = f.readline()
line = line.strip()
if line and line[0] != '#':
return line
return default_ua
def url_concat(url, args, keep_existing=True):
"""Concatenate url and argument dictionary
>>> url_concat("http://example.com/foo?a=b", dict(c="d"))
'http://example.com/foo?a=b&c=d'
:arg string url: URL being concat to.
:arg dict args: Args being concat.
:arg bool keep_existing: (Optional) Whether to keep the args which are
alreay in url, default is ``True``.
"""
if not args:
return url
if keep_existing:
if url[-1] not in ('?', '&'):
url += '&' if ('?' in url) else '?'
return url + urlencode(args, 1)
else:
url, seq, query = url.partition('?')
query = urlparse.parse_qs(query, True)
query.update(args)
return url + '?' + urlencode(query, 1)
def choose_boundary():
"""Generate a multipart boundry.
:returns: A boundary string
"""
global BOUNDARY_PREFIX
if BOUNDARY_PREFIX is None:
BOUNDARY_PREFIX = "urlfetch"
try:
uid = repr(os.getuid())
BOUNDARY_PREFIX += "." + uid
except AttributeError:
pass
try:
pid = repr(os.getpid())
BOUNDARY_PREFIX += "." + pid
except AttributeError:
pass
return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
def encode_multipart(data, files):
"""Encode multipart.
:arg dict data: Data to be encoded
:arg dict files: Files to be encoded
:returns: Encoded binary string
:raises: :class:`UrlfetchException`
"""
body = BytesIO()
boundary = choose_boundary()
part_boundary = b('--%s\r\n' % boundary)
writer = codecs.lookup('utf-8')[3]
if isinstance(data, dict):
for name, values in data.items():
if not isinstance(values, (list, tuple, set)):
# behave like urllib.urlencode(dict, 1)
values = (values, )
for value in values:
body.write(part_boundary)
writer(body).write('Content-Disposition: form-data; '
'name="%s"\r\n' % name)
body.write(b'Content-Type: text/plain\r\n\r\n')
if isinstance(value, int):
value = str(value)
if py3k and isinstance(value, str):
writer(body).write(value)
else:
body.write(value)
body.write(b'\r\n')
for fieldname, f in files.items():
if isinstance(f, tuple):
filename, f = f
elif hasattr(f, 'name'):
filename = basename(f.name)
else:
filename = None
raise UrlfetchException("file must has filename")
if hasattr(f, 'read'):
value = f.read()
elif isinstance(f, basestring):
value = f
else:
value = str(f)
body.write(part_boundary)
if filename:
writer(body).write('Content-Disposition: form-data; name="%s"; '
'filename="%s"\r\n' % (fieldname, filename))
body.write(b'Content-Type: application/octet-stream\r\n\r\n')
else:
writer(body).write('Content-Disposition: form-data; name="%s"'
'\r\n' % name)
body.write(b'Content-Type: text/plain\r\n\r\n')
if py3k and isinstance(value, str):
writer(body).write(value)
else:
body.write(value)
body.write(b'\r\n')
body.write(b('--' + boundary + '--\r\n'))
content_type = 'multipart/form-data; boundary=%s' % boundary
return content_type, body.getvalue()
##############################################################################
# Constants and Globals #######################################################
##############################################################################
ALLOWED_METHODS = ("GET", "DELETE", "HEAD", "OPTIONS", "PUT", "POST", "TRACE",
"PATCH")
PROXY_IGNORE_HOSTS = ('127.0.0.1', 'localhost')
PROXIES = get_proxies_from_environ()
BOUNDARY_PREFIX = None
UAFILENAME = 'urlfetch.useragents.list'
UAFILE = next((i for i in set((pathjoin(sys.prefix, UAFILENAME),
pathjoin(sys.prefix, 'local', UAFILENAME),
pathjoin(dirname(abspath(__file__)), UAFILENAME)))
if os.path.isfile(i)), None)
|
ifduyue/urlfetch
|
urlfetch.py
|
request
|
python
|
def request(url, method="GET", params=None, data=None, headers={},
            timeout=None, files={}, randua=False, auth=None, length_limit=None,
            proxies=None, trust_env=True, max_redirects=0,
            source_address=None, **kwargs):
    """Request an URL and return a :class:`~urlfetch.Response` object.

    :arg string url: URL to be fetched.
    :arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
        ``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``, ``PATCH``.
        ``GET`` is the default.
    :arg dict/string params: (optional) Dict or string to attach to url as
        querystring.
    :arg data: (optional) Dict or string sent as the request body.
    :arg dict headers: (optional) HTTP request headers.
    :arg float timeout: (optional) Timeout in seconds.
    :arg files: (optional) Files to be sent (multipart/form-data).
    :arg randua: (optional) If ``True`` or a path string, use a random
        user-agent in headers, instead of ``'urlfetch/' + __version__``.
    :arg tuple auth: (optional) (username, password) for basic authentication.
    :arg int length_limit: (optional) If ``None``, no limit on content length;
        when the limit is exceeded :class:`ContentLimitExceeded` is raised.
    :arg dict proxies: (optional) HTTP proxies, like
        ``{'http': '127.0.0.1:8888', 'https': '127.0.0.1:563'}``.
    :arg bool trust_env: (optional) If ``True``, urlfetch will get proxy
        information from env, such as HTTP_PROXY, HTTPS_PROXY, NO_PROXY.
    :arg int max_redirects: (optional) Max redirects allowed within a
        request. Default is 0, which means redirects are not followed.
    :arg tuple source_address: (optional) A (host, port) tuple to bind the
        source address to. Ignored on Python older than 2.7/3.2.
    :returns: A :class:`~urlfetch.Response` object.
    :raises: :class:`URLError`, :class:`UrlfetchException`,
        :class:`TooManyRedirects`, :class:`Timeout`
    """
    def make_connection(conn_type, host, port, timeout, source_address):
        """Return HTTP or HTTPS connection."""
        if support_source_address:
            kwargs = {'timeout': timeout, 'source_address': source_address}
        else:
            kwargs = {'timeout': timeout}
            if source_address is not None:
                raise UrlfetchException('source_address requires'
                                        'Python 2.7/3.2 or newer versions')
        if conn_type == 'http':
            conn = HTTPConnection(host, port, **kwargs)
        elif conn_type == 'https':
            conn = HTTPSConnection(host, port, **kwargs)
        else:
            raise URLError('Unknown Connection Type: %s' % conn_type)
        return conn

    via_proxy = False

    method = method.upper()
    if method not in ALLOWED_METHODS:
        raise UrlfetchException("Method should be one of " +
                                ", ".join(ALLOWED_METHODS))

    if params:
        if isinstance(params, dict):
            url = url_concat(url, params)
        elif isinstance(params, basestring):
            if url[-1] not in ('?', '&'):
                url += '&' if ('?' in url) else '?'
            url += params

    parsed_url = parse_url(url)

    reqheaders = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate, compress, identity, *',
        'User-Agent': random_useragent(randua),
        'Host': parsed_url['http_host']
    }

    # Proxy support
    scheme = parsed_url['scheme']
    if proxies is None:
        # BUGFIX: ``proxies`` used to stay ``None`` when ``trust_env`` was
        # false, crashing on ``proxies.get(scheme)`` below.
        proxies = PROXIES if trust_env else {}
    ignore_hosts = PROXY_IGNORE_HOSTS
    if trust_env:
        no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')
        if no_proxy:
            ignore_hosts = no_proxy.split(",")
    proxy = proxies.get(scheme)
    if proxy and not any(match_no_proxy(parsed_url['host'], host)
                         for host in ignore_hosts):
        via_proxy = True
        if '://' not in proxy:
            proxy = '%s://%s' % (scheme, proxy)
        parsed_proxy = parse_url(proxy)
        # Proxy-Authorization
        if parsed_proxy['username'] and parsed_proxy['password']:
            proxyauth = '%s:%s' % (parsed_proxy['username'],
                                   parsed_proxy['password'])
            proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
            reqheaders['Proxy-Authorization'] = 'Basic ' + \
                proxyauth.decode('utf-8')
        conn = make_connection(scheme, parsed_proxy['host'],
                               parsed_proxy['port'], timeout, source_address)
    else:
        conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],
                               timeout, source_address)

    # Credentials embedded in the URL act as a fallback for ``auth``.
    if not auth and parsed_url['username'] and parsed_url['password']:
        auth = (parsed_url['username'], parsed_url['password'])
    if auth:
        if isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
        auth = base64.b64encode(auth.encode('utf-8'))
        reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')

    if files:
        content_type, data = encode_multipart(data, files)
        reqheaders['Content-Type'] = content_type
    elif isinstance(data, dict):
        data = urlencode(data, 1)

    if isinstance(data, basestring) and not files:
        # httplib will set 'Content-Length', also you can set it by yourself
        reqheaders["Content-Type"] = "application/x-www-form-urlencoded"
        # what if the method is GET, HEAD or DELETE
        # just do not make so much decisions for users

    # caller-supplied headers win over the computed defaults
    reqheaders.update(headers)

    start_time = time.time()
    try:
        # a proxied request must carry the absolute URL as request target
        request_url = url if via_proxy else parsed_url['uri']
        conn.request(method, request_url, data, reqheaders)
        resp = conn.getresponse()
    except socket.timeout as e:
        raise Timeout(e)
    except Exception as e:
        raise UrlfetchException(e)
    end_time = time.time()
    total_time = end_time - start_time
    history = []
    response = Response.from_httplib(resp, reqheaders=reqheaders,
                                     length_limit=length_limit,
                                     history=history[:], url=url,
                                     total_time=total_time,
                                     start_time=start_time)

    while (response.status in (301, 302, 303, 307) and
            'location' in response.headers and max_redirects):
        # drain the body so the connection closes cleanly, then record
        # this hop in the redirect history
        response.body
        response.close()
        history.append(response)

        if len(history) > max_redirects:
            raise TooManyRedirects('max_redirects exceeded')

        # 307 preserves the method; other redirect codes fall back to GET
        method = method if response.status == 307 else 'GET'
        location = response.headers['location']
        if location[:2] == '//':
            # protocol-relative redirect target
            url = parsed_url['scheme'] + ':' + location
        else:
            url = urlparse.urljoin(url, location)
        parsed_url = parse_url(url)
        reqheaders['Host'] = parsed_url['http_host']
        reqheaders['Referer'] = response.url

        # Proxy
        scheme = parsed_url['scheme']
        proxy = proxies.get(scheme)
        # BUGFIX: honor the same no-proxy rules as the initial request --
        # redirects previously ignored the NO_PROXY environment variable.
        if proxy and not any(match_no_proxy(parsed_url['host'], host)
                             for host in ignore_hosts):
            via_proxy = True
            if '://' not in proxy:
                proxy = '%s://%s' % (parsed_url['scheme'], proxy)
            parsed_proxy = parse_url(proxy)
            # Proxy-Authorization
            if parsed_proxy['username'] and parsed_proxy['password']:
                # BUGFIX: the credential pair was built from the username
                # twice; it must be username:password.
                proxyauth = '%s:%s' % (parsed_proxy['username'],
                                       parsed_proxy['password'])
                proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
                reqheaders['Proxy-Authorization'] = 'Basic ' + \
                    proxyauth.decode('utf-8')
            conn = make_connection(scheme, parsed_proxy['host'],
                                   parsed_proxy['port'], timeout,
                                   source_address)
        else:
            via_proxy = False
            reqheaders.pop('Proxy-Authorization', None)
            conn = make_connection(scheme, parsed_url['host'],
                                   parsed_url['port'], timeout, source_address)

        try:
            request_url = url if via_proxy else parsed_url['uri']
            conn.request(method, request_url, data, reqheaders)
            resp = conn.getresponse()
        except socket.timeout as e:
            raise Timeout(e)
        except Exception as e:
            raise UrlfetchException(e)
        response = Response.from_httplib(resp, reqheaders=reqheaders,
                                         length_limit=length_limit,
                                         history=history[:], url=url,
                                         total_time=total_time,
                                         start_time=start_time)

    return response
|
request an URL
:arg string url: URL to be fetched.
:arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,
``PATCH``. ``GET`` is the default.
:arg dict/string params: (optional) Dict or string to attach to url as
querystring.
:arg dict headers: (optional) HTTP request headers.
:arg float timeout: (optional) Timeout in seconds
:arg files: (optional) Files to be sent
:arg randua: (optional) If ``True`` or ``path string``, use a random
user-agent in headers, instead of
``'urlfetch/' + __version__``
:arg tuple auth: (optional) (username, password) for basic authentication
:arg int length_limit: (optional) If ``None``, no limits on content length,
if the limit reached raised exception 'Content length
is more than ...'
:arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',
'https': '127.0.0.1:563'}
:arg bool trust_env: (optional) If ``True``, urlfetch will get information
from env, such as HTTP_PROXY, HTTPS_PROXY
:arg int max_redirects: (integer, optional) Max redirects allowed within a
request. Default is 0, which means redirects are
not allowed.
:arg tuple source_address: (optional) A tuple of (host, port) to
specify the source_address to bind to. This
argument is ignored if you're using Python prior
to 2.7/3.2.
:returns: A :class:`~urlfetch.Response` object
:raises: :class:`URLError`, :class:`UrlfetchException`,
:class:`TooManyRedirects`,
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L558-L763
|
[
"def parse_url(url):\n \"\"\"Return a dictionary of parsed url\n\n Including scheme, netloc, path, params, query, fragment, uri, username,\n password, host, port and http_host\n \"\"\"\n try:\n url = unicode(url)\n except UnicodeDecodeError:\n pass\n\n if py3k:\n make_utf8 = lambda x: x\n else:\n make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x\n\n if '://' in url:\n scheme, url = url.split('://', 1)\n else:\n scheme = 'http'\n url = 'http://' + url\n parsed = urlparse.urlsplit(url)\n r = ObjectDict()\n r['scheme'] = make_utf8(scheme)\n r['netloc'] = make_utf8(parsed.netloc)\n r['path'] = make_utf8(parsed.path)\n r['query'] = make_utf8(parsed.query)\n r['fragment'] = make_utf8(parsed.fragment)\n r['uri'] = make_utf8(parsed.path)\n if parsed.query:\n r['uri'] += '?' + make_utf8(parsed.query)\n r['username'] = make_utf8(parsed.username)\n r['password'] = make_utf8(parsed.password)\n host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))\n r['host'] = r['hostname'] = host\n try:\n r['port'] = parsed.port\n except ValueError:\n r['port'] = None\n if r['port']:\n r['http_host'] = '%s:%d' % (r['host'], r['port'])\n else:\n r['http_host'] = r['host']\n\n return r\n",
"def random_useragent(filename=True):\n \"\"\"Returns a User-Agent string randomly from file.\n\n :arg string filename: (Optional) Path to the file from which a random\n useragent is generated. By default it's ``True``, a file shipped\n with this module will be used.\n :returns: An user-agent string.\n \"\"\"\n import random\n\n default_ua = 'urlfetch/%s' % __version__\n\n if isinstance(filename, basestring):\n filenames = [filename]\n else:\n filenames = []\n\n if filename and UAFILE:\n filenames.append(UAFILE)\n\n for filename in filenames:\n try:\n st = os.stat(filename)\n if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):\n break\n except:\n pass\n else:\n return default_ua\n\n with open(filename, 'rb') as f:\n filesize = st.st_size\n pos = 0\n r = random.Random()\n\n # try getting a valid line for no more than 3 times\n for i in range(3):\n\n pos += r.randint(0, filesize)\n pos %= filesize\n f.seek(pos)\n\n # in case we are in middle of a line\n f.readline()\n\n line = f.readline()\n if not line:\n if f.tell() == filesize:\n # end of file\n f.seek(0)\n line = f.readline()\n\n line = line.strip()\n if line and line[0] != '#':\n return line\n\n return default_ua\n",
"def url_concat(url, args, keep_existing=True):\n \"\"\"Concatenate url and argument dictionary\n\n >>> url_concat(\"http://example.com/foo?a=b\", dict(c=\"d\"))\n 'http://example.com/foo?a=b&c=d'\n\n :arg string url: URL being concat to.\n :arg dict args: Args being concat.\n :arg bool keep_existing: (Optional) Whether to keep the args which are\n alreay in url, default is ``True``.\n \"\"\"\n if not args:\n return url\n\n if keep_existing:\n if url[-1] not in ('?', '&'):\n url += '&' if ('?' in url) else '?'\n return url + urlencode(args, 1)\n else:\n url, seq, query = url.partition('?')\n query = urlparse.parse_qs(query, True)\n query.update(args)\n return url + '?' + urlencode(query, 1)\n",
"def encode_multipart(data, files):\n \"\"\"Encode multipart.\n\n :arg dict data: Data to be encoded\n :arg dict files: Files to be encoded\n :returns: Encoded binary string\n :raises: :class:`UrlfetchException`\n \"\"\"\n body = BytesIO()\n boundary = choose_boundary()\n part_boundary = b('--%s\\r\\n' % boundary)\n writer = codecs.lookup('utf-8')[3]\n\n if isinstance(data, dict):\n for name, values in data.items():\n if not isinstance(values, (list, tuple, set)):\n # behave like urllib.urlencode(dict, 1)\n values = (values, )\n for value in values:\n body.write(part_boundary)\n writer(body).write('Content-Disposition: form-data; '\n 'name=\"%s\"\\r\\n' % name)\n body.write(b'Content-Type: text/plain\\r\\n\\r\\n')\n if isinstance(value, int):\n value = str(value)\n if py3k and isinstance(value, str):\n writer(body).write(value)\n else:\n body.write(value)\n body.write(b'\\r\\n')\n\n for fieldname, f in files.items():\n if isinstance(f, tuple):\n filename, f = f\n elif hasattr(f, 'name'):\n filename = basename(f.name)\n else:\n filename = None\n raise UrlfetchException(\"file must has filename\")\n\n if hasattr(f, 'read'):\n value = f.read()\n elif isinstance(f, basestring):\n value = f\n else:\n value = str(f)\n\n body.write(part_boundary)\n if filename:\n writer(body).write('Content-Disposition: form-data; name=\"%s\"; '\n 'filename=\"%s\"\\r\\n' % (fieldname, filename))\n body.write(b'Content-Type: application/octet-stream\\r\\n\\r\\n')\n else:\n writer(body).write('Content-Disposition: form-data; name=\"%s\"'\n '\\r\\n' % name)\n body.write(b'Content-Type: text/plain\\r\\n\\r\\n')\n\n if py3k and isinstance(value, str):\n writer(body).write(value)\n else:\n body.write(value)\n body.write(b'\\r\\n')\n\n body.write(b('--' + boundary + '--\\r\\n'))\n\n content_type = 'multipart/form-data; boundary=%s' % boundary\n\n return content_type, body.getvalue()\n",
"def make_connection(conn_type, host, port, timeout, source_address):\n \"\"\"Return HTTP or HTTPS connection.\"\"\"\n if support_source_address:\n kwargs = {'timeout': timeout, 'source_address': source_address}\n else:\n kwargs = {'timeout': timeout}\n if source_address is not None:\n raise UrlfetchException('source_address requires'\n 'Python 2.7/3.2 or newer versions')\n if conn_type == 'http':\n conn = HTTPConnection(host, port, **kwargs)\n elif conn_type == 'https':\n conn = HTTPSConnection(host, port, **kwargs)\n else:\n raise URLError('Unknown Connection Type: %s' % conn_type)\n return conn\n",
"def from_httplib(cls, connection, **kwargs):\n \"\"\"Make an :class:`~urlfetch.Response` object from a httplib response\n object.\"\"\"\n return cls(connection, **kwargs)\n"
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
urlfetch
~~~~~~~~~~
An easy to use HTTP client based on httplib.
:copyright: (c) 2011-2019 by Yue Du.
:license: BSD 2-clause License, see LICENSE for more details.
"""
__version__ = '1.1.2'
__author__ = 'Yue Du <ifduyue@gmail.com>'
__url__ = 'https://github.com/ifduyue/urlfetch'
__license__ = 'BSD 2-Clause License'
import os, sys, base64, codecs, uuid, stat, time, socket
from os.path import basename, dirname, abspath, join as pathjoin
from functools import partial
from io import BytesIO
import re
try:
import simplejson as json
except ImportError:
import json
py3k = sys.version_info >= (3, 0)
support_source_address = (sys.version_info >= (2, 7) and not py3k
or sys.version_info >= (3, 2))
if py3k:
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urlencode
import urllib.parse as urlparse
import http.cookies as Cookie
basestring = (str, bytes)
unicode = str
b = lambda s: s.encode('latin-1')
u = lambda s: s
else:
from httplib import HTTPConnection, HTTPSConnection
from urllib import urlencode
import urlparse
import Cookie
basestring = basestring
unicode = unicode
b = lambda s: s
u = lambda s: unicode(s, 'unicode_escape')
__all__ = ('request', 'fetch', 'Session',
'get', 'head', 'put', 'post', 'delete',
'options', 'trace', 'patch',
'UrlfetchException', 'ContentLimitExceeded', 'URLError',
'ContentDecodingError', 'TooManyRedirects')
class UrlfetchException(IOError):
    """Base exception; every urlfetch error and exception subclasses this."""
class ContentLimitExceeded(UrlfetchException):
    """Raised when the content length goes beyond the configured limit."""
class URLError(UrlfetchException, ValueError):
    """Raised when the URL cannot be parsed or handled."""
class ContentDecodingError(UrlfetchException):
    """Raised when the response content cannot be decoded."""
class TooManyRedirects(UrlfetchException):
    """Raised when the redirect count exceeds ``max_redirects``."""
class Timeout(UrlfetchException):
    """Raised when a request times out."""
class cached_property(object):
    """A property computed once per instance and then cached.

    The computed value is stored in the instance ``__dict__`` under the
    property's name; deleting the attribute clears the cache so the next
    access recomputes it.  Optional ``fset``/``fdel`` hooks run on
    assignment and deletion.
    """

    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self._fget = fget
        self._fset = fset
        self._fdel = fdel
        # mirror the wrapped function so introspection keeps working
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__

    def __get__(self, instance, owner):
        # class-level access returns the descriptor itself
        if instance is None:
            return self
        cache = instance.__dict__
        key = self.__name__
        if key not in cache:
            cache[key] = self._fget(instance)
        return cache[key]

    def __set__(self, instance, value):
        if instance is None:
            return self
        if self._fset is not None:
            value = self._fset(instance, value)
        instance.__dict__[self.__name__] = value

    def __delete__(self, instance):
        if instance is None:
            return self
        cache = instance.__dict__
        key = self.__name__
        if key in cache:
            value = cache.pop(key)
            if self._fdel is not None:
                self._fdel(instance, value)

    def setter(self, fset):
        """Return a copy of this property with *fset* as the setter hook."""
        return self.__class__(self._fget, fset, self._fdel)

    def deleter(self, fdel):
        """Return a copy of this property with *fdel* as the deleter hook."""
        return self.__class__(self._fget, self._fset, fdel)
##############################################################################
# Core Methods and Classes ####################################################
##############################################################################
class Response(object):
    """A Response object, wrapping an ``httplib.HTTPResponse``.

    >>> import urlfetch
    >>> response = urlfetch.get("http://docs.python.org/")
    >>> response.total_time
    0.033042049407959
    >>> response.status, response.reason, response.version
    (200, 'OK', 10)
    >>> type(response.body), len(response.body)
    (<type 'str'>, 8719)
    >>> type(response.text), len(response.text)
    (<type 'unicode'>, 8719)
    >>> response.getheader('server')
    'Apache/2.2.16 (Debian)'
    >>> response.getheaders()
    [
        ('content-length', '8719'),
        ('server', 'Apache/2.2.16 (Debian)'),
        ('connection', 'close'),
        ('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
        ('content-type', 'text/html'),
    ]
    >>> response.headers
    {
        'content-length': '8719',
        'server': 'Apache/2.2.16 (Debian)',
        'connection': 'close',
        'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
        'content-type': 'text/html',
    }

    :raises: :class:`ContentLimitExceeded`
    """

    def __init__(self, r, **kwargs):
        # Any extra keyword arguments (url, history, start_time,
        # length_limit, reqheaders, ...) are attached as attributes.
        for k in kwargs:
            setattr(self, k, kwargs[k])
        self._r = r  # httplib.HTTPResponse
        self.msg = r.msg
        #: Status code returned by server.
        self.status = r.status
        # compatible with requests
        #: An alias of :attr:`status`.
        self.status_code = r.status
        #: Reason phrase returned by server.
        self.reason = r.reason
        #: HTTP protocol version used by server.
        #: 10 for HTTP/1.0, 11 for HTTP/1.1.
        self.version = r.version
        #: total time
        self.total_time = kwargs.pop('total_time', None)
        # delegate header access straight to the underlying response
        self.getheader = r.getheader
        self.getheaders = r.getheaders

        self._content_encoding = self.getheader('content-encoding', None)
        self._decoder = None

        # length_limit may be absent or non-numeric; treat both as "no limit"
        try:
            self.length_limit = int(kwargs.get('length_limit'))
        except:
            self.length_limit = None

        # if content (length) size is more than length_limit, skip
        content_length = int(self.getheader('Content-Length', 0))
        if self.length_limit and content_length > self.length_limit:
            self.close()
            raise ContentLimitExceeded("Content length is more than %d bytes"
                                       % self.length_limit)

    def read(self, chunk_size=65536):
        """Read content (for streaming and large files)

        :arg int chunk_size: size of chunk, default is 65536.
        """
        return self._r.read(chunk_size)

    def __iter__(self):
        return self

    def __next__(self):
        # Yield (decompressed) chunks; flush the decoder on the final chunk.
        chunk = self.read()
        if not chunk:
            if self._decoder:
                chunk = self._decoder.flush()
                self._decoder = None
                return chunk
            else:
                raise StopIteration
        else:
            ce = self._content_encoding
            if ce in ('gzip', 'deflate'):
                if not self._decoder:
                    import zlib
                    if ce == 'gzip':
                        # 16 + MAX_WBITS makes zlib expect a gzip header
                        self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
                    else:
                        self._decoder = zlib.decompressobj()
                try:
                    return self._decoder.decompress(chunk)
                except zlib.error:
                    # some servers send raw deflate data without the zlib
                    # header; retry with a raw inflater
                    # NOTE(review): ``zlib`` is a function-local import bound
                    # only in the call that created the decoder; if decompress
                    # fails in a later call this except clause itself raises
                    # NameError -- latent bug, confirm upstream.
                    self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
                    try:
                        return self._decoder.decompress(chunk)
                    except (IOError, zlib.error) as e:
                        self.close()
                        raise ContentDecodingError(e)
            if ce:
                self.close()
                raise ContentDecodingError('Unknown encoding: %s' % ce)
            return chunk

    # Python 2 iterator protocol alias.
    next = __next__

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        return False

    @classmethod
    def from_httplib(cls, connection, **kwargs):
        """Make an :class:`~urlfetch.Response` object from a httplib response
        object."""
        return cls(connection, **kwargs)

    @cached_property
    def body(self):
        """Response body as bytes (read, decoded and cached on first access).

        :raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
        """
        content = []
        length = 0
        for chunk in self:
            content.append(chunk)
            length += len(chunk)
            # enforce the limit on the *decoded* size as it accumulates
            if self.length_limit and length > self.length_limit:
                self.close()
                raise ContentLimitExceeded("Content length is more than %d "
                                           "bytes" % self.length_limit)
        return b("").join(content)

    # compatible with requests
    #: An alias of :attr:`body`.
    @property
    def content(self):
        return self.body

    @cached_property
    def text(self):
        """Response body in unicode."""
        return mb_code(self.content)

    @cached_property
    def json(self):
        """Load response body as json.

        :raises: :class:`ContentDecodingError`
        """
        try:
            return json.loads(self.text)
        except Exception as e:
            raise ContentDecodingError(e)

    @cached_property
    def headers(self):
        """Response headers.

        Response headers is a dict with all keys in lower case.

        >>> import urlfetch
        >>> response = urlfetch.get("http://docs.python.org/")
        >>> response.headers['content-type']
        'text/html'
        """
        if py3k:
            return dict((k.lower(), v) for k, v in self.getheaders())
        else:
            # httplib on Python 2 already lower-cases header names
            return dict(self.getheaders())

    @cached_property
    def cookies(self):
        """Cookies in dict"""
        c = Cookie.SimpleCookie(self.getheader('set-cookie'))
        return dict((i.key, i.value) for i in c.values())

    @cached_property
    def cookiestring(self):
        """Cookie string"""
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())

    @cached_property
    def links(self):
        """Links parsed from HTTP Link header"""
        ret = []
        linkheader = self.getheader('link')
        if not linkheader:
            return ret
        # each comma-separated entry is '<url>; param=value; ...'
        for i in linkheader.split(','):
            try:
                url, params = i.split(';', 1)
            except ValueError:
                url, params = i, ''
            link = {}
            link['url'] = url.strip('''<> '"''')
            for param in params.split(';'):
                try:
                    k, v = param.split('=')
                except ValueError:
                    break
                link[k.strip(''' '"''')] = v.strip(''' '"''')
            ret.append(link)
        return ret

    def close(self):
        """Close the connection."""
        self._r.close()

    def __del__(self):
        # best-effort cleanup when the object is garbage collected
        self.close()
class Session(object):
    """A session object.

    :class:`urlfetch.Session` can hold common headers and cookies.
    Every request issued by a :class:`urlfetch.Session` object will carry
    these headers and cookies.

    :class:`urlfetch.Session` plays a role in handling cookies, just like a
    cookiejar.

    :arg dict headers: Init headers.
    :arg dict cookies: Init cookies.
    :arg tuple auth: (username, password) for basic authentication.
    """

    def __init__(self, headers={}, cookies={}, auth=None):
        """Init a :class:`~urlfetch.Session` object"""
        # The mutable defaults are safe here: both dicts are copied.
        #: headers
        self.headers = headers.copy()
        #: cookies
        self.cookies = cookies.copy()

        if auth and isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
            auth = base64.b64encode(auth.encode('utf-8'))
            self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')

    def putheader(self, header, value):
        """Add an header to default headers."""
        self.headers[header] = value

    def popheader(self, header):
        """Remove an header from default headers."""
        return self.headers.pop(header)

    def putcookie(self, key, value=""):
        """Add an cookie to default cookies."""
        self.cookies[key] = value

    def popcookie(self, key):
        """Remove an cookie from default cookies."""
        return self.cookies.pop(key)

    @property
    def cookiestring(self):
        """Cookie string.

        It's assignable, and will change :attr:`~.Session.cookies`
        correspondingly.

        >>> s = Session()
        >>> s.cookiestring = 'foo=bar; 1=2'
        >>> s.cookies
        {'1': '2', 'foo': 'bar'}
        """
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())

    @cookiestring.setter
    def cookiestring(self, value):
        """Cookie string setter"""
        c = Cookie.SimpleCookie(value)
        sc = [(i.key, i.value) for i in c.values()]
        self.cookies = dict(sc)

    def snapshot(self):
        """Return a copy of the session's current headers and cookies."""
        session = {
            'headers': self.headers.copy(),
            'cookies': self.cookies.copy()
        }
        return session

    def request(self, *args, **kwargs):
        """Issue a request.

        Session headers and cookies are merged with per-call ``headers``
        (per-call values win); cookies set by the response are folded back
        into the session.
        """
        headers = self.headers.copy()
        if self.cookiestring:
            headers['Cookie'] = self.cookiestring
        headers.update(kwargs.get('headers', {}))
        kwargs['headers'] = headers

        r = request(*args, **kwargs)
        self.cookies.update(r.cookies)
        return r

    def fetch(self, *args, **kwargs):
        """Fetch an URL: POST when data/files are supplied, otherwise GET."""
        data = kwargs.get('data', None)
        files = kwargs.get('files', {})
        if data and isinstance(data, (basestring, dict)) or files:
            return self.post(*args, **kwargs)
        return self.get(*args, **kwargs)

    def get(self, *args, **kwargs):
        """Issue a get request."""
        kwargs['method'] = 'GET'
        return self.request(*args, **kwargs)

    def post(self, *args, **kwargs):
        """Issue a post request."""
        kwargs['method'] = 'POST'
        return self.request(*args, **kwargs)

    def put(self, *args, **kwargs):
        """Issue a put request."""
        kwargs['method'] = 'PUT'
        return self.request(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """Issue a delete request."""
        kwargs['method'] = 'DELETE'
        return self.request(*args, **kwargs)

    def head(self, *args, **kwargs):
        """Issue a head request."""
        kwargs['method'] = 'HEAD'
        return self.request(*args, **kwargs)

    def options(self, *args, **kwargs):
        """Issue a options request."""
        kwargs['method'] = 'OPTIONS'
        return self.request(*args, **kwargs)

    def trace(self, *args, **kwargs):
        """Issue a trace request."""
        kwargs['method'] = 'TRACE'
        return self.request(*args, **kwargs)

    def patch(self, *args, **kwargs):
        """Issue a patch request."""
        kwargs['method'] = 'PATCH'
        return self.request(*args, **kwargs)
def fetch(*args, **kwargs):
    """Fetch an URL.

    A convenience wrapper around :func:`~urlfetch.request`: issues a
    ``POST`` when a body (``data`` as string/dict) or ``files`` are
    supplied, and a ``GET`` otherwise.
    """
    body = kwargs.get('data')
    uploads = kwargs.get('files', {})
    use_post = uploads or (body and isinstance(body, (basestring, dict)))
    handler = post if use_post else get
    return handler(*args, **kwargs)
def match_no_proxy(host, no_proxy):
    """Decide whether *host* matches a single no-proxy entry.

    When both *host* and *no_proxy* look like dotted IPv4 addresses the
    comparison is numeric, honouring an optional ``/bits`` CIDR suffix on
    *no_proxy*; otherwise a plain suffix match on the hostname is used.

    :arg string host: Host being requested.
    :arg string no_proxy: One entry of the no-proxy list, e.g.
        ``"10.0.0.0/8"`` or ``".example.com"``.
    :returns: ``True`` if the proxy should be bypassed for *host*.
    """
    # BUGFIX: the dots are now escaped -- the old patterns used a bare '.',
    # which matched any character and let non-IP strings such as '1a2b3c4'
    # be mistaken for dotted-quad addresses.
    ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})"
    no_proxy_ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})(?:/(\d+))?"
    ip_match = re.match(ip_regex, host)
    no_proxy_ip_match = re.match(no_proxy_ip_regex, no_proxy)
    if no_proxy_ip_match and ip_match:
        # compare the addresses bit by bit so CIDR prefixes work
        host_bits = "".join("{:08b}".format(int(section))
                            for section in ip_match.group(1, 2, 3, 4))
        no_proxy_bits = "".join("{:08b}".format(int(section))
                                for section in no_proxy_ip_match.group(1, 2, 3, 4))
        if no_proxy_ip_match.group(5) is not None:
            # only the leading <prefix-length> bits have to agree
            bit_match_count = int(no_proxy_ip_match.group(5))
            return host_bits[:bit_match_count] == no_proxy_bits[:bit_match_count]
        return host_bits == no_proxy_bits
    # not an IP-vs-IP comparison: fall back to a hostname suffix match
    return host.endswith(no_proxy)
##############################################################################
# Shortcuts and Helpers #######################################################
##############################################################################
def _partial_method(method):
    """Build a module-level shortcut: :func:`request` with ``method``
    pre-bound, carrying a matching name and docstring."""
    verb = method.lower()
    shortcut = partial(request, method=method)
    shortcut.__doc__ = 'Issue a %s request' % verb
    shortcut.__name__ = verb
    shortcut.__module__ = request.__module__
    return shortcut
# HTTP-verb shortcuts: each is ``request`` with the ``method`` argument
# pre-bound by the factory above.
get = _partial_method("GET")
post = _partial_method("POST")
put = _partial_method("PUT")
delete = _partial_method("DELETE")
head = _partial_method("HEAD")
options = _partial_method("OPTIONS")
trace = _partial_method("TRACE")
patch = _partial_method("PATCH")
del _partial_method  # keep the factory out of the public namespace
class ObjectDict(dict):
    """Makes a dictionary behave like an object.

    Attribute reads fall back to item lookup and attribute writes store
    items, so ``d.key`` and ``d['key']`` stay interchangeable.
    """

    def __getattr__(self, name):
        # only invoked when normal attribute lookup fails, so real dict
        # methods keep working
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value
def parse_url(url):
    """Return a dictionary of parsed url

    Including scheme, netloc, path, params, query, fragment, uri, username,
    password, host, port and http_host.
    """
    try:
        url = unicode(url)
    except UnicodeDecodeError:
        # keep undecodable byte strings as-is
        pass

    if py3k:
        make_utf8 = lambda x: x
    else:
        # on Python 2, turn unicode pieces back into utf-8 byte strings
        make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x

    # scheme-less URLs default to http
    if '://' in url:
        scheme, url = url.split('://', 1)
    else:
        scheme = 'http'
        url = 'http://' + url
    parsed = urlparse.urlsplit(url)
    r = ObjectDict()
    r['scheme'] = make_utf8(scheme)
    r['netloc'] = make_utf8(parsed.netloc)
    r['path'] = make_utf8(parsed.path)
    r['query'] = make_utf8(parsed.query)
    r['fragment'] = make_utf8(parsed.fragment)
    # 'uri' is the request target sent on the wire: path plus querystring
    r['uri'] = make_utf8(parsed.path)
    if parsed.query:
        r['uri'] += '?' + make_utf8(parsed.query)
    r['username'] = make_utf8(parsed.username)
    r['password'] = make_utf8(parsed.password)
    # IDNA-encode the hostname so internationalized domain names work.
    # NOTE(review): parsed.hostname is None for host-less URLs, which would
    # raise AttributeError here -- confirm callers always pass a host.
    host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
    r['host'] = r['hostname'] = host
    try:
        r['port'] = parsed.port
    except ValueError:
        # non-numeric port component in the URL
        r['port'] = None
    if r['port']:
        r['http_host'] = '%s:%d' % (r['host'], r['port'])
    else:
        r['http_host'] = r['host']

    return r
def get_proxies_from_environ():
    """Collect http/https proxy settings from ``os.environ``.

    The lowercase variable wins over the uppercase one; schemes with no
    proxy configured are simply absent from the returned dict.
    """
    found = {}
    for scheme in ('http', 'https'):
        candidate = (os.getenv('%s_proxy' % scheme) or
                     os.getenv('%s_PROXY' % scheme.upper()))
        if candidate:
            found[scheme] = candidate
    return found
def mb_code(s, coding=None, errors='replace'):
    """encoding/decoding helper.

    Decode byte-string *s* by trying common (CJK-friendly) encodings in
    order; return unicode when *coding* is None, otherwise the text
    re-encoded as *coding*.  Unicode input is passed through (or
    re-encoded).
    """
    if isinstance(s, unicode):
        return s if coding is None else s.encode(coding, errors=errors)
    for c in ('utf-8', 'gb2312', 'gbk', 'gb18030', 'big5'):
        try:
            s = s.decode(c)
            return s if coding is None else s.encode(coding, errors=errors)
        except:
            # decoding (or re-encoding) failed -- try the next candidate
            pass
    # nothing matched: force-decode with replacement characters
    return unicode(s, errors=errors)
def random_useragent(filename=True):
    """Returns a User-Agent string randomly from file.

    :arg string filename: (Optional) Path to the file from which a random
        useragent is generated. By default it's ``True``, a file shipped
        with this module will be used.
    :returns: An user-agent string.
        NOTE(review): lines read from the file come back as bytes, since
        the file is opened in ``'rb'`` mode -- confirm callers accept that.
    """
    import random

    default_ua = 'urlfetch/%s' % __version__

    if isinstance(filename, basestring):
        filenames = [filename]
    else:
        filenames = []

    # fall back to the bundled user-agent list when available
    if filename and UAFILE:
        filenames.append(UAFILE)

    # pick the first candidate that is a readable regular file
    for filename in filenames:
        try:
            st = os.stat(filename)
            if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
                break
        except:
            pass
    else:
        # no usable file found
        return default_ua

    with open(filename, 'rb') as f:
        filesize = st.st_size
        pos = 0
        r = random.Random()

        # try getting a valid line for no more than 3 times
        for i in range(3):

            # jump to a random byte offset within the file
            pos += r.randint(0, filesize)
            pos %= filesize
            f.seek(pos)

            # in case we are in middle of a line
            f.readline()

            line = f.readline()
            if not line:
                if f.tell() == filesize:
                    # end of file
                    f.seek(0)
                    line = f.readline()

            line = line.strip()
            # skip blank lines and '#' comments
            if line and line[0] != '#':
                return line

    return default_ua
def url_concat(url, args, keep_existing=True):
    """Concatenate url and argument dictionary

    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'

    :arg string url: URL being concat to.
    :arg dict args: Args being concat.
    :arg bool keep_existing: (Optional) Whether to keep the args which are
        already in url, default is ``True``.
    """
    if not args:
        return url

    if not keep_existing:
        # merge into (and overwrite) whatever querystring is already there
        base, _sep, query = url.partition('?')
        merged = urlparse.parse_qs(query, True)
        merged.update(args)
        return base + '?' + urlencode(merged, 1)

    # just append; pick the right separator first
    if url[-1] not in ('?', '&'):
        url += '&' if '?' in url else '?'
    return url + urlencode(args, 1)
def choose_boundary():
    """Generate a multipart boundary.

    The static prefix (``urlfetch[.<uid>][.<pid>]``) is computed once and
    memoized in the module-global ``BOUNDARY_PREFIX``; every call appends
    a fresh ``uuid4`` hex suffix.

    :returns: A boundary string
    """
    global BOUNDARY_PREFIX
    if BOUNDARY_PREFIX is None:
        parts = ["urlfetch"]
        # uid/pid are unavailable on some platforms (e.g. no getuid on
        # Windows) -- include whichever ones exist
        for idfunc in (getattr(os, 'getuid', None), getattr(os, 'getpid', None)):
            if idfunc is not None:
                parts.append(repr(idfunc()))
        BOUNDARY_PREFIX = ".".join(parts)
    return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
def encode_multipart(data, files):
    """Encode fields and files as a multipart/form-data body.

    :arg dict data: Data to be encoded.
    :arg dict files: Files to be encoded.  Values may be file-like objects,
        plain strings, or ``(filename, content)`` tuples.
    :returns: Tuple of ``(content_type, encoded_binary_string)``
    :raises: :class:`UrlfetchException`
    """
    body = BytesIO()
    boundary = choose_boundary()
    part_boundary = b('--%s\r\n' % boundary)
    # Incremental UTF-8 writer so unicode names/values can go into the
    # binary buffer.
    writer = codecs.lookup('utf-8')[3]

    if isinstance(data, dict):
        for name, values in data.items():
            if not isinstance(values, (list, tuple, set)):
                # behave like urllib.urlencode(dict, 1)
                values = (values, )
            for value in values:
                body.write(part_boundary)
                writer(body).write('Content-Disposition: form-data; '
                                   'name="%s"\r\n' % name)
                body.write(b'Content-Type: text/plain\r\n\r\n')
                if isinstance(value, int):
                    value = str(value)
                if py3k and isinstance(value, str):
                    writer(body).write(value)
                else:
                    body.write(value)
                body.write(b'\r\n')

    for fieldname, f in files.items():
        if isinstance(f, tuple):
            filename, f = f
        elif hasattr(f, 'name'):
            filename = basename(f.name)
        else:
            # No way to determine a filename; removed the dead
            # ``filename = None`` assignment that preceded this raise.
            raise UrlfetchException("file must has filename")
        if hasattr(f, 'read'):
            value = f.read()
        elif isinstance(f, basestring):
            value = f
        else:
            value = str(f)
        body.write(part_boundary)
        if filename:
            writer(body).write('Content-Disposition: form-data; name="%s"; '
                               'filename="%s"\r\n' % (fieldname, filename))
            body.write(b'Content-Type: application/octet-stream\r\n\r\n')
        else:
            # BUG FIX: this branch used to interpolate ``name`` -- a leftover
            # loop variable from the data loop above -- which produced the
            # wrong field name, or a NameError when ``data`` was not a dict.
            writer(body).write('Content-Disposition: form-data; name="%s"'
                               '\r\n' % fieldname)
            body.write(b'Content-Type: text/plain\r\n\r\n')
        if py3k and isinstance(value, str):
            writer(body).write(value)
        else:
            body.write(value)
        body.write(b'\r\n')

    body.write(b('--' + boundary + '--\r\n'))
    content_type = 'multipart/form-data; boundary=%s' % boundary
    return content_type, body.getvalue()
##############################################################################
# Constants and Globals #######################################################
##############################################################################

# HTTP verbs accepted by request(); anything else raises UrlfetchException.
ALLOWED_METHODS = ("GET", "DELETE", "HEAD", "OPTIONS", "PUT", "POST", "TRACE",
                   "PATCH")
# Hosts that are never routed through a proxy (overridable via $no_proxy).
PROXY_IGNORE_HOSTS = ('127.0.0.1', 'localhost')
# Proxies picked up from HTTP_PROXY / HTTPS_PROXY at import time.
PROXIES = get_proxies_from_environ()
# Lazily initialised by choose_boundary(): "urlfetch[.uid][.pid]".
BOUNDARY_PREFIX = None
# Bundled user-agent list: the first existing candidate path wins,
# None when the file is not installed.
UAFILENAME = 'urlfetch.useragents.list'
UAFILE = next((i for i in set((pathjoin(sys.prefix, UAFILENAME),
                               pathjoin(sys.prefix, 'local', UAFILENAME),
                               pathjoin(dirname(abspath(__file__)), UAFILENAME)))
               if os.path.isfile(i)), None)
|
ifduyue/urlfetch
|
urlfetch.py
|
parse_url
|
python
|
def parse_url(url):
    """Return a dictionary of the parsed url.

    Includes scheme, netloc, path, query, fragment, uri, username,
    password, host (== hostname), port and http_host.
    """
    try:
        # Normalize byte strings to text; already-unicode input passes through.
        url = unicode(url)
    except UnicodeDecodeError:
        pass
    if py3k:
        make_utf8 = lambda x: x
    else:
        # Python 2: downcast unicode to utf-8 bytes; leave other values as-is.
        make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x
    if '://' in url:
        scheme, url = url.split('://', 1)
    else:
        # No scheme given: assume plain http.
        scheme = 'http'
        url = 'http://' + url
    parsed = urlparse.urlsplit(url)
    r = ObjectDict()
    r['scheme'] = make_utf8(scheme)
    r['netloc'] = make_utf8(parsed.netloc)
    r['path'] = make_utf8(parsed.path)
    r['query'] = make_utf8(parsed.query)
    r['fragment'] = make_utf8(parsed.fragment)
    # "uri" is the request target sent on the wire: path plus querystring.
    r['uri'] = make_utf8(parsed.path)
    if parsed.query:
        r['uri'] += '?' + make_utf8(parsed.query)
    r['username'] = make_utf8(parsed.username)
    r['password'] = make_utf8(parsed.password)
    # IDNA-encode internationalized hostnames, then decode back to text.
    host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
    r['host'] = r['hostname'] = host
    try:
        r['port'] = parsed.port
    except ValueError:
        # Non-numeric port in the URL.
        r['port'] = None
    if r['port']:
        r['http_host'] = '%s:%d' % (r['host'], r['port'])
    else:
        # Default port: the Host header carries no port suffix.
        r['http_host'] = r['host']
    return r
|
Return a dictionary of the parsed URL,
including scheme, netloc, path, params, query, fragment, uri, username,
password, host, port and http_host.
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L801-L845
|
[
"make_utf8 = lambda x: x\n"
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
urlfetch
~~~~~~~~~~
An easy to use HTTP client based on httplib.
:copyright: (c) 2011-2019 by Yue Du.
:license: BSD 2-clause License, see LICENSE for more details.
"""
__version__ = '1.1.2'
__author__ = 'Yue Du <ifduyue@gmail.com>'
__url__ = 'https://github.com/ifduyue/urlfetch'
__license__ = 'BSD 2-Clause License'
import os, sys, base64, codecs, uuid, stat, time, socket
from os.path import basename, dirname, abspath, join as pathjoin
from functools import partial
from io import BytesIO
import re
try:
import simplejson as json
except ImportError:
import json
py3k = sys.version_info >= (3, 0)
support_source_address = (sys.version_info >= (2, 7) and not py3k
or sys.version_info >= (3, 2))
if py3k:
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urlencode
import urllib.parse as urlparse
import http.cookies as Cookie
basestring = (str, bytes)
unicode = str
b = lambda s: s.encode('latin-1')
u = lambda s: s
else:
from httplib import HTTPConnection, HTTPSConnection
from urllib import urlencode
import urlparse
import Cookie
basestring = basestring
unicode = unicode
b = lambda s: s
u = lambda s: unicode(s, 'unicode_escape')
__all__ = ('request', 'fetch', 'Session',
'get', 'head', 'put', 'post', 'delete',
'options', 'trace', 'patch',
'UrlfetchException', 'ContentLimitExceeded', 'URLError',
'ContentDecodingError', 'TooManyRedirects')
class UrlfetchException(IOError):
    """Base exception. All exceptions and errors will subclass from this."""

class ContentLimitExceeded(UrlfetchException):
    """Content length is beyond the limit."""

class URLError(UrlfetchException, ValueError):
    """Error parsing or handling the URL."""

class ContentDecodingError(UrlfetchException):
    """Failed to decode the content."""

class TooManyRedirects(UrlfetchException):
    """Too many redirects."""

class Timeout(UrlfetchException):
    """Request timed out."""
class cached_property(object):
    """Cached property.

    A property that is only computed once per instance; the result is then
    stored in the instance ``__dict__`` and returned directly on later
    access.  Deleting the attribute resets the property.
    """

    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self._fget = fget
        self._fset = fset
        self._fdel = fdel
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__

    def __get__(self, instance, owner):
        if instance is None:
            # Accessed through the owner class itself.
            return self
        cache = instance.__dict__
        if self.__name__ not in cache:
            cache[self.__name__] = self._fget(instance)
        return cache[self.__name__]

    def __set__(self, instance, value):
        if instance is None:
            return self
        if self._fset is not None:
            value = self._fset(instance, value)
        instance.__dict__[self.__name__] = value

    def __delete__(self, instance):
        if instance is None:
            return self
        if self.__name__ in instance.__dict__:
            value = instance.__dict__.pop(self.__name__)
            if self._fdel is not None:
                self._fdel(instance, value)

    def setter(self, fset):
        return self.__class__(self._fget, fset, self._fdel)

    def deleter(self, fdel):
        return self.__class__(self._fget, self._fset, fdel)
##############################################################################
# Core Methods and Classes ####################################################
##############################################################################
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
def body(self):
"""Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
"""
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
def json(self):
"""Load response body as json.
:raises: :class:`ContentDecodingError`
"""
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
@cached_property
def headers(self):
"""Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
"""
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
@cached_property
def cookies(self):
"""Cookies in dict"""
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
@cached_property
def cookiestring(self):
"""Cookie string"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def links(self):
"""Links parsed from HTTP Link header"""
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
def close(self):
"""Close the connection."""
self._r.close()
def __del__(self):
self.close()
class Session(object):
    """A session object.

    :class:`urlfetch.Session` can hold common headers and cookies.
    Every request issued by a :class:`urlfetch.Session` object will carry
    these headers and cookies.

    :class:`urlfetch.Session` plays a role in handling cookies, just like a
    cookiejar.

    :arg dict headers: Init headers.
    :arg dict cookies: Init cookies.
    :arg tuple auth: (username, password) for basic authentication.
    """

    def __init__(self, headers={}, cookies={}, auth=None):
        """Init a :class:`~urlfetch.Session` object"""
        # Mutable defaults are safe here: both dicts are copied immediately
        # and the defaults themselves are never mutated.
        #: headers
        self.headers = headers.copy()
        #: cookies
        self.cookies = cookies.copy()
        if auth and isinstance(auth, (list, tuple)):
            # Pre-compute the Basic auth header once for the whole session.
            auth = '%s:%s' % tuple(auth)
            auth = base64.b64encode(auth.encode('utf-8'))
            self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')

    def putheader(self, header, value):
        """Add a header to the default headers."""
        self.headers[header] = value

    def popheader(self, header):
        """Remove a header from the default headers."""
        return self.headers.pop(header)

    def putcookie(self, key, value=""):
        """Add a cookie to the default cookies."""
        self.cookies[key] = value

    def popcookie(self, key):
        """Remove a cookie from the default cookies."""
        return self.cookies.pop(key)

    @property
    def cookiestring(self):
        """Cookie string.

        It's assignable, and will change :attr:`~.Session.cookies`
        correspondingly.

        >>> s = Session()
        >>> s.cookiestring = 'foo=bar; 1=2'
        >>> s.cookies
        {'1': '2', 'foo': 'bar'}
        """
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())

    @cookiestring.setter
    def cookiestring(self, value):
        """Cookie string setter"""
        # Parse through SimpleCookie so quoting/attributes are handled.
        c = Cookie.SimpleCookie(value)
        sc = [(i.key, i.value) for i in c.values()]
        self.cookies = dict(sc)

    def snapshot(self):
        """Return a copy of the session's current headers and cookies."""
        session = {
            'headers': self.headers.copy(),
            'cookies': self.cookies.copy()
        }
        return session

    def request(self, *args, **kwargs):
        """Issue a request."""
        # Session defaults first; per-call headers override them.
        headers = self.headers.copy()
        if self.cookiestring:
            headers['Cookie'] = self.cookiestring
        headers.update(kwargs.get('headers', {}))
        kwargs['headers'] = headers
        r = request(*args, **kwargs)
        # Absorb any Set-Cookie values into the session, cookiejar-style.
        self.cookies.update(r.cookies)
        return r

    def fetch(self, *args, **kwargs):
        """Fetch an URL: POST when a payload is supplied, else GET."""
        data = kwargs.get('data', None)
        files = kwargs.get('files', {})
        if data and isinstance(data, (basestring, dict)) or files:
            return self.post(*args, **kwargs)
        return self.get(*args, **kwargs)

    def get(self, *args, **kwargs):
        """Issue a get request."""
        kwargs['method'] = 'GET'
        return self.request(*args, **kwargs)

    def post(self, *args, **kwargs):
        """Issue a post request."""
        kwargs['method'] = 'POST'
        return self.request(*args, **kwargs)

    def put(self, *args, **kwargs):
        """Issue a put request."""
        kwargs['method'] = 'PUT'
        return self.request(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """Issue a delete request."""
        kwargs['method'] = 'DELETE'
        return self.request(*args, **kwargs)

    def head(self, *args, **kwargs):
        """Issue a head request."""
        kwargs['method'] = 'HEAD'
        return self.request(*args, **kwargs)

    def options(self, *args, **kwargs):
        """Issue an options request."""
        kwargs['method'] = 'OPTIONS'
        return self.request(*args, **kwargs)

    def trace(self, *args, **kwargs):
        """Issue a trace request."""
        kwargs['method'] = 'TRACE'
        return self.request(*args, **kwargs)

    def patch(self, *args, **kwargs):
        """Issue a patch request."""
        kwargs['method'] = 'PATCH'
        return self.request(*args, **kwargs)
def fetch(*args, **kwargs):
    """Fetch an URL.

    A thin wrapper of :func:`~urlfetch.request`: it calls
    :func:`~urlfetch.get` by default and switches to
    :func:`~urlfetch.post` when a ``data`` or ``files`` argument is
    supplied.
    """
    data = kwargs.get('data', None)
    files = kwargs.get('files', {})
    use_post = bool(files) or bool(data and isinstance(data,
                                                       (basestring, dict)))
    if use_post:
        return post(*args, **kwargs)
    return get(*args, **kwargs)
def match_no_proxy(host, no_proxy):
    """Decide whether ``host`` matches one ``no_proxy`` entry.

    ``no_proxy`` may be a plain domain suffix (matched with ``endswith``),
    an IPv4 address, or an IPv4 network in CIDR notation such as
    ``"192.168.1.0/24"``.

    :arg string host: Host being requested.
    :arg string no_proxy: One entry of the no-proxy list.
    :returns: ``True`` if ``host`` should bypass the proxy.
    """
    # BUG FIX: the dots were previously unescaped (matching any character)
    # and the patterns were unanchored at the end, so strings such as
    # "192x168x1x5" or "1.2.3.4.evil.com" were misclassified as IPv4
    # addresses.  (The lookahead for the prefix length was also replaced by
    # a consuming optional group so the pattern can be end-anchored.)
    ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$"
    no_proxy_ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})(?:/(\d+))?$"
    ip_match = re.match(ip_regex, host)
    no_proxy_ip_match = re.match(no_proxy_ip_regex, no_proxy)
    if no_proxy_ip_match and ip_match:
        # Compare the two addresses through their 32-bit binary form.
        host_bits = "".join("{:08b}".format(int(section))
                            for section in ip_match.group(1, 2, 3, 4))
        no_proxy_bits = "".join("{:08b}".format(int(section))
                                for section in
                                no_proxy_ip_match.group(1, 2, 3, 4))
        prefix_len = no_proxy_ip_match.group(5)
        if prefix_len is not None:
            # CIDR entry: only the network prefix has to match.
            bit_match_count = int(prefix_len)
            return host_bits[:bit_match_count] == no_proxy_bits[:bit_match_count]
        return host_bits == no_proxy_bits
    # Not an IP-to-IP comparison: fall back to a domain-suffix match.
    return host.endswith(no_proxy)
def request(url, method="GET", params=None, data=None, headers={},
            timeout=None, files={}, randua=False, auth=None, length_limit=None,
            proxies=None, trust_env=True, max_redirects=0,
            source_address=None, **kwargs):
    """request an URL

    :arg string url: URL to be fetched.
    :arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
        ``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,
        ``PATCH``. ``GET`` is the default.
    :arg dict/string params: (optional) Dict or string to attach to url as
        querystring.
    :arg dict headers: (optional) HTTP request headers.
    :arg float timeout: (optional) Timeout in seconds
    :arg files: (optional) Files to be sended
    :arg randua: (optional) If ``True`` or ``path string``, use a random
        user-agent in headers, instead of ``'urlfetch/' + __version__``
    :arg tuple auth: (optional) (username, password) for basic authentication
    :arg int length_limit: (optional) If ``None``, no limits on content length,
        if the limit reached raised exception 'Content length
        is more than ...'
    :arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',
        'https': '127.0.0.1:563'}
    :arg bool trust_env: (optional) If ``True``, urlfetch will get infomations
        from env, such as HTTP_PROXY, HTTPS_PROXY
    :arg int max_redirects: (integer, optional) Max redirects allowed within a
        request. Default is 0, which means redirects are not allowed.
    :arg tuple source_address: (optional) A tuple of (host, port) to
        specify the source_address to bind to. This argument is ignored if
        you're using Python prior to 2.7/3.2.
    :returns: A :class:`~urlfetch.Response` object
    :raises: :class:`URLError`, :class:`UrlfetchException`,
        :class:`TooManyRedirects`,
    """
    def make_connection(conn_type, host, port, timeout, source_address):
        """Return HTTP or HTTPS connection."""
        if support_source_address:
            kwargs = {'timeout': timeout, 'source_address': source_address}
        else:
            kwargs = {'timeout': timeout}
            if source_address is not None:
                # BUG FIX: added the missing space between the two adjacent
                # string literals of this error message.
                raise UrlfetchException('source_address requires '
                                       'Python 2.7/3.2 or newer versions')
        if conn_type == 'http':
            conn = HTTPConnection(host, port, **kwargs)
        elif conn_type == 'https':
            conn = HTTPSConnection(host, port, **kwargs)
        else:
            raise URLError('Unknown Connection Type: %s' % conn_type)
        return conn

    via_proxy = False
    method = method.upper()
    if method not in ALLOWED_METHODS:
        raise UrlfetchException("Method should be one of " +
                                ", ".join(ALLOWED_METHODS))

    # Attach extra querystring parameters, if any.
    if params:
        if isinstance(params, dict):
            url = url_concat(url, params)
        elif isinstance(params, basestring):
            if url[-1] not in ('?', '&'):
                url += '&' if ('?' in url) else '?'
            url += params

    parsed_url = parse_url(url)
    reqheaders = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate, compress, identity, *',
        'User-Agent': random_useragent(randua),
        'Host': parsed_url['http_host']
    }

    # Proxy support
    scheme = parsed_url['scheme']
    # BUG FIX: with ``trust_env=False`` and no explicit ``proxies`` the old
    # code left ``proxies`` as None and crashed on ``proxies.get`` below.
    if proxies is None:
        proxies = PROXIES if trust_env else {}
    ignore_hosts = PROXY_IGNORE_HOSTS
    if trust_env:
        no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')
        if no_proxy:
            ignore_hosts = no_proxy.split(",")
    proxy = proxies.get(scheme)
    if proxy and not any(match_no_proxy(parsed_url['host'], host)
                         for host in ignore_hosts):
        via_proxy = True
        if '://' not in proxy:
            proxy = '%s://%s' % (scheme, proxy)
        parsed_proxy = parse_url(proxy)
        # Proxy-Authorization
        if parsed_proxy['username'] and parsed_proxy['password']:
            proxyauth = '%s:%s' % (parsed_proxy['username'],
                                   parsed_proxy['password'])
            proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
            reqheaders['Proxy-Authorization'] = 'Basic ' + \
                proxyauth.decode('utf-8')
        conn = make_connection(scheme, parsed_proxy['host'],
                               parsed_proxy['port'], timeout, source_address)
    else:
        conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],
                               timeout, source_address)

    # Credentials embedded in the URL are used unless ``auth`` was given.
    if not auth and parsed_url['username'] and parsed_url['password']:
        auth = (parsed_url['username'], parsed_url['password'])
    if auth:
        if isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
        auth = base64.b64encode(auth.encode('utf-8'))
        reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')

    if files:
        content_type, data = encode_multipart(data, files)
        reqheaders['Content-Type'] = content_type
    elif isinstance(data, dict):
        data = urlencode(data, 1)

    if isinstance(data, basestring) and not files:
        # httplib will set 'Content-Length', also you can set it by yourself
        reqheaders["Content-Type"] = "application/x-www-form-urlencoded"
        # what if the method is GET, HEAD or DELETE
        # just do not make so much decisions for users

    # Caller-supplied headers always win over the computed defaults.
    reqheaders.update(headers)

    start_time = time.time()
    try:
        # Through a proxy the absolute URL is sent; otherwise the path+query.
        request_url = url if via_proxy else parsed_url['uri']
        conn.request(method, request_url, data, reqheaders)
        resp = conn.getresponse()
    except socket.timeout as e:
        raise Timeout(e)
    except Exception as e:
        raise UrlfetchException(e)
    end_time = time.time()
    total_time = end_time - start_time
    history = []
    response = Response.from_httplib(resp, reqheaders=reqheaders,
                                     length_limit=length_limit,
                                     history=history[:], url=url,
                                     total_time=total_time,
                                     start_time=start_time)

    while (response.status in (301, 302, 303, 307) and
            'location' in response.headers and max_redirects):
        # Force the cached body to be read before closing the connection,
        # then archive this hop in the redirect history.
        response.body
        response.close()
        history.append(response)
        if len(history) > max_redirects:
            raise TooManyRedirects('max_redirects exceeded')
        # Only 307 preserves the original method; other codes retry as GET.
        method = method if response.status == 307 else 'GET'
        location = response.headers['location']
        if location[:2] == '//':
            # Scheme-relative redirect target.
            url = parsed_url['scheme'] + ':' + location
        else:
            url = urlparse.urljoin(url, location)
        parsed_url = parse_url(url)
        reqheaders['Host'] = parsed_url['http_host']
        reqheaders['Referer'] = response.url
        # Proxy
        scheme = parsed_url['scheme']
        proxy = proxies.get(scheme)
        if proxy and parsed_url['host'] not in PROXY_IGNORE_HOSTS:
            via_proxy = True
            if '://' not in proxy:
                proxy = '%s://%s' % (parsed_url['scheme'], proxy)
            parsed_proxy = parse_url(proxy)
            # Proxy-Authorization
            if parsed_proxy['username'] and parsed_proxy['password']:
                # BUG FIX: the username used to be interpolated twice here,
                # producing invalid "user:user" credentials on redirects.
                proxyauth = '%s:%s' % (parsed_proxy['username'],
                                       parsed_proxy['password'])
                proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
                reqheaders['Proxy-Authorization'] = 'Basic ' + \
                    proxyauth.decode('utf-8')
            conn = make_connection(scheme, parsed_proxy['host'],
                                   parsed_proxy['port'], timeout,
                                   source_address)
        else:
            via_proxy = False
            reqheaders.pop('Proxy-Authorization', None)
            conn = make_connection(scheme, parsed_url['host'],
                                   parsed_url['port'], timeout, source_address)
        try:
            request_url = url if via_proxy else parsed_url['uri']
            conn.request(method, request_url, data, reqheaders)
            resp = conn.getresponse()
        except socket.timeout as e:
            raise Timeout(e)
        except Exception as e:
            raise UrlfetchException(e)
        # NOTE(review): total_time/start_time still refer to the first hop;
        # kept as-is to preserve the existing Response contract.
        response = Response.from_httplib(resp, reqheaders=reqheaders,
                                         length_limit=length_limit,
                                         history=history[:], url=url,
                                         total_time=total_time,
                                         start_time=start_time)
    return response
##############################################################################
# Shortcuts and Helpers #######################################################
##############################################################################
def _partial_method(method):
    """Create a module-level shortcut (get/post/...) bound to *method*."""
    verb = method.lower()
    shortcut = partial(request, method=method)
    shortcut.__doc__ = 'Issue a %s request' % verb
    shortcut.__name__ = verb
    shortcut.__module__ = request.__module__
    return shortcut
# Module-level HTTP-verb shortcuts: get(url, ...), post(url, data=...), etc.
get = _partial_method("GET")
post = _partial_method("POST")
put = _partial_method("PUT")
delete = _partial_method("DELETE")
head = _partial_method("HEAD")
options = _partial_method("OPTIONS")
trace = _partial_method("TRACE")
patch = _partial_method("PATCH")
# The factory is internal; drop it from the module namespace.
del _partial_method
class ObjectDict(dict):
    """A dict whose items are also reachable as attributes."""

    def __getattr__(self, name):
        # Missing keys must surface as AttributeError (not KeyError) so
        # that getattr()/hasattr() keep their usual semantics.
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value
def get_proxies_from_environ():
    """Get proxies from os.environ.

    Checks the lower-case variable first, then the upper-case one, for
    both the http and https schemes.
    """
    proxies = {}
    for scheme in ('http', 'https'):
        value = (os.getenv('%s_proxy' % scheme) or
                 os.getenv('%s_PROXY' % scheme.upper()))
        if value:
            proxies[scheme] = value
    return proxies
def mb_code(s, coding=None, errors='replace'):
    """encoding/decoding helper.

    Decode ``s`` by trying a list of common encodings; re-encode the
    result to ``coding`` when given.  This is deliberately best-effort:
    any decode failure just moves on to the next candidate encoding.
    """
    # Already text: nothing to guess, only (optionally) re-encode.
    if isinstance(s, unicode):
        return s if coding is None else s.encode(coding, errors=errors)
    # Try utf-8 first, then the common Chinese codecs, keeping the first
    # that decodes (and, when requested, re-encodes) cleanly.
    for c in ('utf-8', 'gb2312', 'gbk', 'gb18030', 'big5'):
        try:
            s = s.decode(c)
            return s if coding is None else s.encode(coding, errors=errors)
        except:
            # Intentional broad catch: failure means "try the next codec".
            pass
    # Last resort: decode with the default codec, replacing bad bytes.
    return unicode(s, errors=errors)
def random_useragent(filename=True):
    """Returns a User-Agent string randomly picked from a file.

    The file is sampled at a random byte offset instead of being read in
    full, so large user-agent lists stay cheap to use.

    :arg string filename: (Optional) Path to the file from which a random
        useragent is generated.  By default it's ``True``: the list
        shipped with this module (``UAFILE``) is used.
    :returns: An user-agent string, or the ``urlfetch/<version>`` default
        when no usable file is found.
    """
    import random
    default_ua = 'urlfetch/%s' % __version__
    if isinstance(filename, basestring):
        filenames = [filename]
    else:
        filenames = []
        if filename and UAFILE:
            filenames.append(UAFILE)
    # Pick the first candidate that is a readable regular file.
    for filename in filenames:
        try:
            st = os.stat(filename)
            if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
                break
        except OSError:
            # Missing/unstattable path: try the next candidate.
            pass
    else:
        return default_ua
    with open(filename, 'rb') as f:
        filesize = st.st_size
        pos = 0
        r = random.Random()
        # try getting a valid line for no more than 3 times
        for i in range(3):
            pos += r.randint(0, filesize)
            pos %= filesize
            f.seek(pos)
            # in case we are in the middle of a line
            f.readline()
            line = f.readline()
            if not line:
                if f.tell() == filesize:
                    # end of file: wrap around to the first line
                    f.seek(0)
                    line = f.readline()
            line = line.strip()
            # BUG FIX: the file is opened in binary mode, so on Python 3
            # ``line[0]`` is an int and the old ``line[0] != '#'`` test
            # never filtered comment lines.  Compare byte slices instead
            # (works on both Python 2 and 3).
            if line and line[:1] != b'#':
                # NOTE(review): on Python 3 this returns bytes while the
                # fallback default is str -- presumably callers accept
                # both; confirm before changing.
                return line
    return default_ua
def url_concat(url, args, keep_existing=True):
"""Concatenate url and argument dictionary
>>> url_concat("http://example.com/foo?a=b", dict(c="d"))
'http://example.com/foo?a=b&c=d'
:arg string url: URL being concat to.
:arg dict args: Args being concat.
:arg bool keep_existing: (Optional) Whether to keep the args which are
alreay in url, default is ``True``.
"""
if not args:
return url
if keep_existing:
if url[-1] not in ('?', '&'):
url += '&' if ('?' in url) else '?'
return url + urlencode(args, 1)
else:
url, seq, query = url.partition('?')
query = urlparse.parse_qs(query, True)
query.update(args)
return url + '?' + urlencode(query, 1)
def choose_boundary():
"""Generate a multipart boundry.
:returns: A boundary string
"""
global BOUNDARY_PREFIX
if BOUNDARY_PREFIX is None:
BOUNDARY_PREFIX = "urlfetch"
try:
uid = repr(os.getuid())
BOUNDARY_PREFIX += "." + uid
except AttributeError:
pass
try:
pid = repr(os.getpid())
BOUNDARY_PREFIX += "." + pid
except AttributeError:
pass
return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
def encode_multipart(data, files):
"""Encode multipart.
:arg dict data: Data to be encoded
:arg dict files: Files to be encoded
:returns: Encoded binary string
:raises: :class:`UrlfetchException`
"""
body = BytesIO()
boundary = choose_boundary()
part_boundary = b('--%s\r\n' % boundary)
writer = codecs.lookup('utf-8')[3]
if isinstance(data, dict):
for name, values in data.items():
if not isinstance(values, (list, tuple, set)):
# behave like urllib.urlencode(dict, 1)
values = (values, )
for value in values:
body.write(part_boundary)
writer(body).write('Content-Disposition: form-data; '
'name="%s"\r\n' % name)
body.write(b'Content-Type: text/plain\r\n\r\n')
if isinstance(value, int):
value = str(value)
if py3k and isinstance(value, str):
writer(body).write(value)
else:
body.write(value)
body.write(b'\r\n')
for fieldname, f in files.items():
if isinstance(f, tuple):
filename, f = f
elif hasattr(f, 'name'):
filename = basename(f.name)
else:
filename = None
raise UrlfetchException("file must has filename")
if hasattr(f, 'read'):
value = f.read()
elif isinstance(f, basestring):
value = f
else:
value = str(f)
body.write(part_boundary)
if filename:
writer(body).write('Content-Disposition: form-data; name="%s"; '
'filename="%s"\r\n' % (fieldname, filename))
body.write(b'Content-Type: application/octet-stream\r\n\r\n')
else:
writer(body).write('Content-Disposition: form-data; name="%s"'
'\r\n' % name)
body.write(b'Content-Type: text/plain\r\n\r\n')
if py3k and isinstance(value, str):
writer(body).write(value)
else:
body.write(value)
body.write(b'\r\n')
body.write(b('--' + boundary + '--\r\n'))
content_type = 'multipart/form-data; boundary=%s' % boundary
return content_type, body.getvalue()
##############################################################################
# Constants and Globals #######################################################
##############################################################################
ALLOWED_METHODS = ("GET", "DELETE", "HEAD", "OPTIONS", "PUT", "POST", "TRACE",
"PATCH")
PROXY_IGNORE_HOSTS = ('127.0.0.1', 'localhost')
PROXIES = get_proxies_from_environ()
BOUNDARY_PREFIX = None
UAFILENAME = 'urlfetch.useragents.list'
UAFILE = next((i for i in set((pathjoin(sys.prefix, UAFILENAME),
pathjoin(sys.prefix, 'local', UAFILENAME),
pathjoin(dirname(abspath(__file__)), UAFILENAME)))
if os.path.isfile(i)), None)
|
ifduyue/urlfetch
|
urlfetch.py
|
get_proxies_from_environ
|
python
|
def get_proxies_from_environ():
proxies = {}
http_proxy = os.getenv('http_proxy') or os.getenv('HTTP_PROXY')
https_proxy = os.getenv('https_proxy') or os.getenv('HTTPS_PROXY')
if http_proxy:
proxies['http'] = http_proxy
if https_proxy:
proxies['https'] = https_proxy
return proxies
|
Get proxies from os.environ.
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L848-L857
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
urlfetch
~~~~~~~~~~
An easy to use HTTP client based on httplib.
:copyright: (c) 2011-2019 by Yue Du.
:license: BSD 2-clause License, see LICENSE for more details.
"""
__version__ = '1.1.2'
__author__ = 'Yue Du <ifduyue@gmail.com>'
__url__ = 'https://github.com/ifduyue/urlfetch'
__license__ = 'BSD 2-Clause License'
import os, sys, base64, codecs, uuid, stat, time, socket
from os.path import basename, dirname, abspath, join as pathjoin
from functools import partial
from io import BytesIO
import re
try:
import simplejson as json
except ImportError:
import json
py3k = sys.version_info >= (3, 0)
support_source_address = (sys.version_info >= (2, 7) and not py3k
or sys.version_info >= (3, 2))
if py3k:
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urlencode
import urllib.parse as urlparse
import http.cookies as Cookie
basestring = (str, bytes)
unicode = str
b = lambda s: s.encode('latin-1')
u = lambda s: s
else:
from httplib import HTTPConnection, HTTPSConnection
from urllib import urlencode
import urlparse
import Cookie
basestring = basestring
unicode = unicode
b = lambda s: s
u = lambda s: unicode(s, 'unicode_escape')
__all__ = ('request', 'fetch', 'Session',
'get', 'head', 'put', 'post', 'delete',
'options', 'trace', 'patch',
'UrlfetchException', 'ContentLimitExceeded', 'URLError',
'ContentDecodingError', 'TooManyRedirects')
# Exception hierarchy: every error raised by urlfetch derives from
# UrlfetchException, so callers can catch the single base class.
class UrlfetchException(IOError):
    "Base exception. All exceptions and errors will subclass from this."
# Raised when the body grows beyond the caller-supplied ``length_limit``.
class ContentLimitExceeded(UrlfetchException):
    "Content length is beyond the limit."
# Bad or unsupported URL; also a ValueError so generic callers catch it.
class URLError(UrlfetchException, ValueError):
    "Error parsing or handling the URL."
# Raised when gzip/deflate/json decoding of the response body fails.
class ContentDecodingError(UrlfetchException):
    "Failed to decode the content."
# Raised when a redirect chain exceeds ``max_redirects``.
class TooManyRedirects(UrlfetchException):
    """Too many redirects."""
# Raised when the underlying socket operation times out.
class Timeout(UrlfetchException):
    """Request timed out."""
class cached_property(object):
    """Cached property.

    A property that is only computed once per instance and then replaces
    itself with an ordinary attribute. Deleting the attribute resets the
    property.
    """
    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        # fget/fset/fdel mirror the built-in property() protocol.
        self.__get = fget
        self.__set = fset
        self.__del = fdel
        # Mimic the wrapped function so introspection still works.
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__
    def __get__(self, instance, owner):
        if instance is None:
            # attribute is accessed through the owner class
            return self
        try:
            # Cache hit: the computed value lives in the instance dict.
            return instance.__dict__[self.__name__]
        except KeyError:
            # Cache miss: compute once and store for subsequent accesses.
            value = instance.__dict__[self.__name__] = self.__get(instance)
            return value
    def __set__(self, instance, value):
        if instance is None:
            return self
        if self.__set is not None:
            # Let a custom setter transform the value before caching it.
            value = self.__set(instance, value)
        instance.__dict__[self.__name__] = value
    def __delete__(self, instance):
        if instance is None:
            return self
        try:
            # Dropping the cached value resets the property.
            value = instance.__dict__.pop(self.__name__)
        except KeyError:
            pass
        else:
            if self.__del is not None:
                self.__del(instance, value)
    def setter(self, fset):
        # Return a new descriptor with the setter replaced (property-style).
        return self.__class__(self.__get, fset, self.__del)
    def deleter(self, fdel):
        # Return a new descriptor with the deleter replaced (property-style).
        return self.__class__(self.__get, self.__set, fdel)
##############################################################################
# Core Methods and Classes ####################################################
##############################################################################
class Response(object):
    """A Response object.
    >>> import urlfetch
    >>> response = urlfetch.get("http://docs.python.org/")
    >>> response.total_time
    0.033042049407959
    >>> response.status, response.reason, response.version
    (200, 'OK', 10)
    >>> type(response.body), len(response.body)
    (<type 'str'>, 8719)
    >>> type(response.text), len(response.text)
    (<type 'unicode'>, 8719)
    >>> response.getheader('server')
    'Apache/2.2.16 (Debian)'
    >>> response.getheaders()
    [
        ('content-length', '8719'),
        ('x-cache', 'MISS from localhost'),
        ('accept-ranges', 'bytes'),
        ('vary', 'Accept-Encoding'),
        ('server', 'Apache/2.2.16 (Debian)'),
        ('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
        ('connection', 'close'),
        ('etag', '"13cc5e4-220f-4c36507ded580"'),
        ('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
        ('content-type', 'text/html'),
        ('x-cache-lookup', 'MISS from localhost:8080')
    ]
    >>> response.headers
    {
        'content-length': '8719',
        'x-cache': 'MISS from localhost',
        'accept-ranges': 'bytes',
        'vary': 'Accept-Encoding',
        'server': 'Apache/2.2.16 (Debian)',
        'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
        'connection': 'close',
        'etag': '"13cc5e4-220f-4c36507ded580"',
        'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
        'content-type': 'text/html',
        'x-cache-lookup': 'MISS from localhost:8080'
    }
    :raises: :class:`ContentLimitExceeded`
    """
    def __init__(self, r, **kwargs):
        # Expose arbitrary extras (url, history, reqheaders, ...) as attributes.
        for k in kwargs:
            setattr(self, k, kwargs[k])
        self._r = r  # httplib.HTTPResponse
        self.msg = r.msg
        #: Status code returned by server.
        self.status = r.status
        # compatible with requests
        #: An alias of :attr:`status`.
        self.status_code = r.status
        #: Reason phrase returned by server.
        self.reason = r.reason
        #: HTTP protocol version used by server.
        #: 10 for HTTP/1.0, 11 for HTTP/1.1.
        self.version = r.version
        #: total time
        self.total_time = kwargs.pop('total_time', None)
        # Delegate header access straight to the underlying httplib response.
        self.getheader = r.getheader
        self.getheaders = r.getheaders
        self._content_encoding = self.getheader('content-encoding', None)
        # Lazily created zlib decompressor for gzip/deflate bodies.
        self._decoder = None
        try:
            self.length_limit = int(kwargs.get('length_limit'))
        except:
            # Any missing/unparsable limit means "no limit".
            self.length_limit = None
        # if content (length) size is more than length_limit, skip
        content_length = int(self.getheader('Content-Length', 0))
        if self.length_limit and content_length > self.length_limit:
            self.close()
            raise ContentLimitExceeded("Content length is more than %d bytes"
                                       % self.length_limit)
    def read(self, chunk_size=65536):
        """Read content (for streaming and large files)

        :arg int chunk_size: size of chunk, default is 65536.
        """
        return self._r.read(chunk_size)
    def __iter__(self):
        return self
    def __next__(self):
        # Iterating yields decoded chunks; StopIteration once drained.
        chunk = self.read()
        if not chunk:
            if self._decoder:
                # Emit whatever the decompressor still buffers, exactly once.
                chunk = self._decoder.flush()
                self._decoder = None
                return chunk
            else:
                raise StopIteration
        else:
            ce = self._content_encoding
            if ce in ('gzip', 'deflate'):
                if not self._decoder:
                    import zlib
                    if ce == 'gzip':
                        # 16 + MAX_WBITS tells zlib to expect a gzip header.
                        self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
                    else:
                        self._decoder = zlib.decompressobj()
                        try:
                            return self._decoder.decompress(chunk)
                        except zlib.error:
                            # Some servers send raw deflate without the
                            # zlib header; retry with a raw stream.
                            self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
                try:
                    return self._decoder.decompress(chunk)
                except (IOError, zlib.error) as e:
                    self.close()
                    raise ContentDecodingError(e)
            if ce:
                self.close()
                raise ContentDecodingError('Unknown encoding: %s' % ce)
            return chunk
    # Python 2 iterator protocol alias.
    next = __next__
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Context-manager exit always closes; exceptions propagate.
        self.close()
        return False
    @classmethod
    def from_httplib(cls, connection, **kwargs):
        """Make an :class:`~urlfetch.Response` object from a httplib response
        object."""
        return cls(connection, **kwargs)
    @cached_property
    def body(self):
        """Response body.

        :raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
        """
        content = []
        length = 0
        for chunk in self:
            content.append(chunk)
            length += len(chunk)
            # Enforce the limit on the *decoded* stream as it accumulates.
            if self.length_limit and length > self.length_limit:
                self.close()
                raise ContentLimitExceeded("Content length is more than %d "
                                           "bytes" % self.length_limit)
        return b("").join(content)
    # compatible with requests
    #: An alias of :attr:`body`.
    @property
    def content(self):
        return self.body
    @cached_property
    def text(self):
        """Response body in unicode."""
        return mb_code(self.content)
    @cached_property
    def json(self):
        """Load response body as json.

        :raises: :class:`ContentDecodingError`
        """
        try:
            return json.loads(self.text)
        except Exception as e:
            raise ContentDecodingError(e)
    @cached_property
    def headers(self):
        """Response headers.
        Response headers is a dict with all keys in lower case.
        >>> import urlfetch
        >>> response = urlfetch.get("http://docs.python.org/")
        >>> response.headers
        {
            'content-length': '8719',
            'x-cache': 'MISS from localhost',
            'accept-ranges': 'bytes',
            'vary': 'Accept-Encoding',
            'server': 'Apache/2.2.16 (Debian)',
            'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
            'connection': 'close',
            'etag': '"13cc5e4-220f-4c36507ded580"',
            'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
            'content-type': 'text/html',
            'x-cache-lookup': 'MISS from localhost:8080'
        }
        """
        if py3k:
            # Python 3 httplib preserves header case; normalize to lower.
            return dict((k.lower(), v) for k, v in self.getheaders())
        else:
            return dict(self.getheaders())
    @cached_property
    def cookies(self):
        """Cookies in dict"""
        c = Cookie.SimpleCookie(self.getheader('set-cookie'))
        return dict((i.key, i.value) for i in c.values())
    @cached_property
    def cookiestring(self):
        """Cookie string"""
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
    @cached_property
    def links(self):
        """Links parsed from HTTP Link header"""
        ret = []
        linkheader = self.getheader('link')
        if not linkheader:
            return ret
        for i in linkheader.split(','):
            try:
                url, params = i.split(';', 1)
            except ValueError:
                # Entry without parameters, e.g. "<http://...>".
                url, params = i, ''
            link = {}
            link['url'] = url.strip('''<> '"''')
            for param in params.split(';'):
                try:
                    k, v = param.split('=')
                except ValueError:
                    break
                link[k.strip(''' '"''')] = v.strip(''' '"''')
            ret.append(link)
        return ret
    def close(self):
        """Close the connection."""
        self._r.close()
    def __del__(self):
        # Best-effort cleanup when the object is garbage-collected.
        self.close()
class Session(object):
    """A session object.
    :class:`urlfetch.Session` can hold common headers and cookies.
    Every request issued by a :class:`urlfetch.Session` object will bring
    these headers and cookies.
    :class:`urlfetch.Session` plays a role in handling cookies, just like a
    cookiejar.
    :arg dict headers: Init headers.
    :arg dict cookies: Init cookies.
    :arg tuple auth: (username, password) for basic authentication.
    """
    def __init__(self, headers={}, cookies={}, auth=None):
        """Init a :class:`~urlfetch.Session` object"""
        # The mutable defaults are safe only because they are copied here;
        # never mutate them in place.
        #: headers
        self.headers = headers.copy()
        #: cookies
        self.cookies = cookies.copy()
        if auth and isinstance(auth, (list, tuple)):
            # Pre-compute the Basic auth header from (username, password).
            auth = '%s:%s' % tuple(auth)
            auth = base64.b64encode(auth.encode('utf-8'))
            self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')
    def putheader(self, header, value):
        """Add an header to default headers."""
        self.headers[header] = value
    def popheader(self, header):
        """Remove an header from default headers."""
        return self.headers.pop(header)
    def putcookie(self, key, value=""):
        """Add an cookie to default cookies."""
        self.cookies[key] = value
    def popcookie(self, key):
        """Remove an cookie from default cookies."""
        return self.cookies.pop(key)
    @property
    def cookiestring(self):
        """Cookie string.

        It's assignable, and will change :attr:`~.Session.cookies`
        correspondingly.
        >>> s = Session()
        >>> s.cookiestring = 'foo=bar; 1=2'
        >>> s.cookies
        {'1': '2', 'foo': 'bar'}
        """
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
    @cookiestring.setter
    def cookiestring(self, value):
        """"Cookie string setter"""
        c = Cookie.SimpleCookie(value)
        sc = [(i.key, i.value) for i in c.values()]
        self.cookies = dict(sc)
    def snapshot(self):
        """Return a copy of the session state (headers and cookies)."""
        session = {
            'headers': self.headers.copy(),
            'cookies': self.cookies.copy()
        }
        return session
    def request(self, *args, **kwargs):
        """Issue a request."""
        # Per-call headers override the session defaults.
        headers = self.headers.copy()
        if self.cookiestring:
            headers['Cookie'] = self.cookiestring
        headers.update(kwargs.get('headers', {}))
        kwargs['headers'] = headers
        r = request(*args, **kwargs)
        # Absorb any Set-Cookie values so later requests carry them.
        self.cookies.update(r.cookies)
        return r
    def fetch(self, *args, **kwargs):
        """Fetch an URL"""
        # POST when there is a body or files to upload, otherwise GET.
        data = kwargs.get('data', None)
        files = kwargs.get('files', {})
        if data and isinstance(data, (basestring, dict)) or files:
            return self.post(*args, **kwargs)
        return self.get(*args, **kwargs)
    def get(self, *args, **kwargs):
        """Issue a get request."""
        kwargs['method'] = 'GET'
        return self.request(*args, **kwargs)
    def post(self, *args, **kwargs):
        """Issue a post request."""
        kwargs['method'] = 'POST'
        return self.request(*args, **kwargs)
    def put(self, *args, **kwargs):
        """Issue a put request."""
        kwargs['method'] = 'PUT'
        return self.request(*args, **kwargs)
    def delete(self, *args, **kwargs):
        """Issue a delete request."""
        kwargs['method'] = 'DELETE'
        return self.request(*args, **kwargs)
    def head(self, *args, **kwargs):
        """Issue a head request."""
        kwargs['method'] = 'HEAD'
        return self.request(*args, **kwargs)
    def options(self, *args, **kwargs):
        """Issue a options request."""
        kwargs['method'] = 'OPTIONS'
        return self.request(*args, **kwargs)
    def trace(self, *args, **kwargs):
        """Issue a trace request."""
        kwargs['method'] = 'TRACE'
        return self.request(*args, **kwargs)
    def patch(self, *args, **kwargs):
        """Issue a patch request."""
        kwargs['method'] = 'PATCH'
        return self.request(*args, **kwargs)
def fetch(*args, **kwargs):
    """Fetch an URL.

    A thin wrapper of :func:`~urlfetch.request`: dispatches to
    :func:`~urlfetch.post` when a usable ``data`` payload or any ``files``
    are supplied, otherwise to :func:`~urlfetch.get`.
    """
    payload = kwargs.get('data')
    uploads = kwargs.get('files', {})
    use_post = (payload and isinstance(payload, (basestring, dict))) or uploads
    if use_post:
        return post(*args, **kwargs)
    return get(*args, **kwargs)
def match_no_proxy(host, no_proxy):
    """Decide whether *host* matches a single ``no_proxy`` entry.

    When both *host* and *no_proxy* look like dotted-quad IPv4 addresses,
    the comparison is done on the binary representation, honouring an
    optional CIDR prefix length in *no_proxy* (e.g. ``10.0.0.0/8``).
    Otherwise a plain domain-suffix match is used.

    :arg string host: Hostname or IPv4 address of the request target.
    :arg string no_proxy: One entry from the no_proxy list, either an
        IPv4 address (optionally with ``/bits``) or a domain suffix.
    :returns: ``True`` if *host* should bypass the proxy.
    """
    # Dots must be escaped: with a bare ".", a non-IP host such as
    # "192x168x1x7" would be mistaken for an IPv4 address and could
    # falsely match a CIDR entry.
    ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})"
    no_proxy_ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})(?=/(\d+))?"
    ip_match = re.match(ip_regex, host)
    no_proxy_ip_match = re.match(no_proxy_ip_regex, no_proxy)
    if no_proxy_ip_match and ip_match:
        # Expand each octet to 8 bits so prefixes can be compared directly.
        host_bits = "".join("{:08b}".format(int(section)) for section in ip_match.group(1, 2, 3, 4))
        no_proxy_bits = "".join("{:08b}".format(int(section)) for section in no_proxy_ip_match.group(1, 2, 3, 4))
        if no_proxy_ip_match.group(5) is not None:
            # CIDR form: compare only the leading prefix bits.
            bit_match_count = int(no_proxy_ip_match.group(5))
            return host_bits[:bit_match_count] == no_proxy_bits[:bit_match_count]
        else:
            return host_bits == no_proxy_bits
    else:
        # Non-IP entries behave as domain suffixes.
        return host.endswith(no_proxy)
def request(url, method="GET", params=None, data=None, headers={},
            timeout=None, files={}, randua=False, auth=None, length_limit=None,
            proxies=None, trust_env=True, max_redirects=0,
            source_address=None, **kwargs):
    """request an URL
    :arg string url: URL to be fetched.
    :arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
                        ``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,
                        ``PATCH``. ``GET`` is the default.
    :arg dict/string params: (optional) Dict or string to attach to url as
                             querystring.
    :arg dict headers: (optional) HTTP request headers.
    :arg float timeout: (optional) Timeout in seconds
    :arg files: (optional) Files to be sent
    :arg randua: (optional) If ``True`` or ``path string``, use a random
                 user-agent in headers, instead of
                 ``'urlfetch/' + __version__``
    :arg tuple auth: (optional) (username, password) for basic authentication
    :arg int length_limit: (optional) If ``None``, no limits on content length,
                           if the limit reached raised exception 'Content length
                           is more than ...'
    :arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',
                       'https': '127.0.0.1:563'}
    :arg bool trust_env: (optional) If ``True``, urlfetch will get infomations
                         from env, such as HTTP_PROXY, HTTPS_PROXY
    :arg int max_redirects: (integer, optional) Max redirects allowed within a
                            request. Default is 0, which means redirects are
                            not allowed.
    :arg tuple source_address: (optional) A tuple of (host, port) to
                               specify the source_address to bind to. This
                               argument is ignored if you're using Python prior
                               to 2.7/3.2.
    :returns: A :class:`~urlfetch.Response` object
    :raises: :class:`URLError`, :class:`UrlfetchException`,
             :class:`TooManyRedirects`,
    """
    def make_connection(conn_type, host, port, timeout, source_address):
        """Return HTTP or HTTPS connection."""
        if support_source_address:
            kwargs = {'timeout': timeout, 'source_address': source_address}
        else:
            kwargs = {'timeout': timeout}
            if source_address is not None:
                raise UrlfetchException('source_address requires'
                                        'Python 2.7/3.2 or newer versions')
        if conn_type == 'http':
            conn = HTTPConnection(host, port, **kwargs)
        elif conn_type == 'https':
            conn = HTTPSConnection(host, port, **kwargs)
        else:
            raise URLError('Unknown Connection Type: %s' % conn_type)
        return conn
    via_proxy = False
    method = method.upper()
    if method not in ALLOWED_METHODS:
        raise UrlfetchException("Method should be one of " +
                                ", ".join(ALLOWED_METHODS))
    if params:
        if isinstance(params, dict):
            url = url_concat(url, params)
        elif isinstance(params, basestring):
            if url[-1] not in ('?', '&'):
                url += '&' if ('?' in url) else '?'
            url += params
    parsed_url = parse_url(url)
    reqheaders = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate, compress, identity, *',
        'User-Agent': random_useragent(randua),
        'Host': parsed_url['http_host']
    }
    # Proxy support
    scheme = parsed_url['scheme']
    # FIX: always normalize ``proxies`` to a dict.  Previously
    # ``proxies=None`` together with ``trust_env=False`` left it as None
    # and ``proxies.get(scheme)`` below raised AttributeError.
    if proxies is None:
        proxies = PROXIES if trust_env else {}
    ignore_hosts = PROXY_IGNORE_HOSTS
    if trust_env:
        no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')
        if no_proxy:
            ignore_hosts = no_proxy.split(",")
    proxy = proxies.get(scheme)
    if proxy and not any(match_no_proxy(parsed_url['host'], host) for host in ignore_hosts):
        via_proxy = True
        if '://' not in proxy:
            proxy = '%s://%s' % (scheme, proxy)
        parsed_proxy = parse_url(proxy)
        # Proxy-Authorization
        if parsed_proxy['username'] and parsed_proxy['password']:
            proxyauth = '%s:%s' % (parsed_proxy['username'],
                                   parsed_proxy['password'])
            proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
            reqheaders['Proxy-Authorization'] = 'Basic ' + \
                proxyauth.decode('utf-8')
        conn = make_connection(scheme, parsed_proxy['host'],
                               parsed_proxy['port'], timeout, source_address)
    else:
        conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],
                               timeout, source_address)
    # Credentials embedded in the URL are used unless ``auth`` is given.
    if not auth and parsed_url['username'] and parsed_url['password']:
        auth = (parsed_url['username'], parsed_url['password'])
    if auth:
        if isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
        auth = base64.b64encode(auth.encode('utf-8'))
        reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')
    if files:
        content_type, data = encode_multipart(data, files)
        reqheaders['Content-Type'] = content_type
    elif isinstance(data, dict):
        data = urlencode(data, 1)
    if isinstance(data, basestring) and not files:
        # httplib will set 'Content-Length', also you can set it by yourself
        reqheaders["Content-Type"] = "application/x-www-form-urlencoded"
        # what if the method is GET, HEAD or DELETE
        # just do not make so much decisions for users
    reqheaders.update(headers)
    start_time = time.time()
    try:
        # When tunnelling through a proxy the absolute URL must be sent.
        request_url = url if via_proxy else parsed_url['uri']
        conn.request(method, request_url, data, reqheaders)
        resp = conn.getresponse()
    except socket.timeout as e:
        raise Timeout(e)
    except Exception as e:
        raise UrlfetchException(e)
    end_time = time.time()
    total_time = end_time - start_time
    history = []
    response = Response.from_httplib(resp, reqheaders=reqheaders,
                                     length_limit=length_limit,
                                     history=history[:], url=url,
                                     total_time=total_time,
                                     start_time=start_time)
    while (response.status in (301, 302, 303, 307) and
            'location' in response.headers and max_redirects):
        # Drain the body so the connection can be reused, then remember hop.
        response.body, response.close(), history.append(response)
        if len(history) > max_redirects:
            raise TooManyRedirects('max_redirects exceeded')
        # Only 307 preserves the original method; others degrade to GET.
        method = method if response.status == 307 else 'GET'
        location = response.headers['location']
        if location[:2] == '//':
            # Protocol-relative redirect keeps the current scheme.
            url = parsed_url['scheme'] + ':' + location
        else:
            url = urlparse.urljoin(url, location)
        parsed_url = parse_url(url)
        reqheaders['Host'] = parsed_url['http_host']
        reqheaders['Referer'] = response.url
        # Proxy
        scheme = parsed_url['scheme']
        proxy = proxies.get(scheme)
        if proxy and parsed_url['host'] not in PROXY_IGNORE_HOSTS:
            via_proxy = True
            if '://' not in proxy:
                proxy = '%s://%s' % (parsed_url['scheme'], proxy)
            parsed_proxy = parse_url(proxy)
            # Proxy-Authorization
            if parsed_proxy['username'] and parsed_proxy['password']:
                # FIX: was "username, username" — the password half of the
                # credential pair was never sent on redirected requests.
                proxyauth = '%s:%s' % (parsed_proxy['username'],
                                       parsed_proxy['password'])
                proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
                reqheaders['Proxy-Authorization'] = 'Basic ' + \
                    proxyauth.decode('utf-8')
            conn = make_connection(scheme, parsed_proxy['host'],
                                   parsed_proxy['port'], timeout,
                                   source_address)
        else:
            via_proxy = False
            reqheaders.pop('Proxy-Authorization', None)
            conn = make_connection(scheme, parsed_url['host'],
                                   parsed_url['port'], timeout, source_address)
        try:
            request_url = url if via_proxy else parsed_url['uri']
            conn.request(method, request_url, data, reqheaders)
            resp = conn.getresponse()
        except socket.timeout as e:
            raise Timeout(e)
        except Exception as e:
            raise UrlfetchException(e)
        response = Response.from_httplib(resp, reqheaders=reqheaders,
                                         length_limit=length_limit,
                                         history=history[:], url=url,
                                         total_time=total_time,
                                         start_time=start_time)
    return response
##############################################################################
# Shortcuts and Helpers #######################################################
##############################################################################
def _partial_method(method):
    """Build a module-level shortcut that issues *method* requests."""
    lower = method.lower()
    shortcut = partial(request, method=method)
    # Make the partial look like a hand-written function for introspection.
    shortcut.__doc__ = 'Issue a %s request' % lower
    shortcut.__name__ = lower
    shortcut.__module__ = request.__module__
    return shortcut
get = _partial_method("GET")
post = _partial_method("POST")
put = _partial_method("PUT")
delete = _partial_method("DELETE")
head = _partial_method("HEAD")
options = _partial_method("OPTIONS")
trace = _partial_method("TRACE")
patch = _partial_method("PATCH")
del _partial_method
class ObjectDict(dict):
    """A dict whose items can also be read and written as attributes."""

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails; fall back to keys.
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        # Every attribute assignment becomes a dict item.
        self[name] = value
def parse_url(url):
    """Return a dictionary of parsed url

    Including scheme, netloc, path, params, query, fragment, uri, username,
    password, host, port and http_host
    """
    try:
        url = unicode(url)
    except UnicodeDecodeError:
        pass
    if py3k:
        # Python 3 strings are already text.
        make_utf8 = lambda x: x
    else:
        # Python 2: encode unicode pieces to utf-8 byte strings.
        make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x
    if '://' in url:
        scheme, url = url.split('://', 1)
    else:
        scheme = 'http'
        url = 'http://' + url
    # NOTE(review): when the URL already carries a scheme, it is stripped
    # before urlsplit; verify against upstream that urlsplit still sees a
    # netloc in that branch.
    parsed = urlparse.urlsplit(url)
    r = ObjectDict()
    r['scheme'] = make_utf8(scheme)
    r['netloc'] = make_utf8(parsed.netloc)
    r['path'] = make_utf8(parsed.path)
    r['query'] = make_utf8(parsed.query)
    r['fragment'] = make_utf8(parsed.fragment)
    # uri = path plus querystring; this is what is sent on the request line.
    r['uri'] = make_utf8(parsed.path)
    if parsed.query:
        r['uri'] += '?' + make_utf8(parsed.query)
    r['username'] = make_utf8(parsed.username)
    r['password'] = make_utf8(parsed.password)
    # IDNA-encode international hostnames.
    # NOTE(review): parsed.hostname may be None for malformed URLs, which
    # would raise AttributeError here — confirm intended behaviour.
    host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
    r['host'] = r['hostname'] = host
    try:
        r['port'] = parsed.port
    except ValueError:
        # Non-numeric port in the URL.
        r['port'] = None
    if r['port']:
        r['http_host'] = '%s:%d' % (r['host'], r['port'])
    else:
        r['http_host'] = r['host']
    return r
def mb_code(s, coding=None, errors='replace'):
    """encoding/decoding helper.

    Decode *s* by trying a list of common codecs (utf-8 plus several CJK
    encodings).  If *coding* is given, the decoded text is re-encoded to
    that codec; otherwise unicode text is returned.

    :arg s: A byte string or unicode string.
    :arg string coding: (optional) Target encoding for the result.
    :arg string errors: (optional) Error handler passed to encode,
        default ``'replace'``.
    """
    if isinstance(s, unicode):
        return s if coding is None else s.encode(coding, errors=errors)
    for c in ('utf-8', 'gb2312', 'gbk', 'gb18030', 'big5'):
        # Keep *s* untouched so a failed re-encode does not poison the
        # next decode attempt (the old code rebound ``s`` and then relied
        # on a bare ``except:`` to paper over the resulting errors).
        try:
            decoded = s.decode(c)
            return decoded if coding is None else decoded.encode(coding,
                                                                 errors=errors)
        except (UnicodeError, LookupError):
            continue
    # Last resort: decode with the default codec, replacing bad bytes.
    return unicode(s, errors=errors)
def random_useragent(filename=True):
    """Returns a User-Agent string randomly from file.

    :arg string filename: (Optional) Path to the file from which a random
        useragent is generated. By default it's ``True``, a file shipped
        with this module will be used.
    :returns: An user-agent string.
    """
    import random
    default_ua = 'urlfetch/%s' % __version__
    if isinstance(filename, basestring):
        # Explicit path supplied by the caller.
        filenames = [filename]
    else:
        filenames = []
        if filename and UAFILE:
            # Truthy non-string (e.g. True) means "use the bundled file".
            filenames.append(UAFILE)
    for filename in filenames:
        try:
            st = os.stat(filename)
            # Accept the first candidate that is a readable regular file.
            if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
                break
        except:
            pass
    else:
        # No usable file found.
        return default_ua
    with open(filename, 'rb') as f:
        filesize = st.st_size
        pos = 0
        r = random.Random()
        # try getting a valid line for no more than 3 times
        for i in range(3):
            # Jump to a random byte offset within the file.
            pos += r.randint(0, filesize)
            pos %= filesize
            f.seek(pos)
            # in case we are in middle of a line
            f.readline()
            line = f.readline()
            if not line:
                if f.tell() == filesize:
                    # end of file
                    f.seek(0)
                    line = f.readline()
            line = line.strip()
            # NOTE(review): the file is opened in 'rb', so under Python 3
            # ``line`` is bytes — ``line[0] != '#'`` compares int to str
            # (always True) and the returned UA is bytes; confirm whether
            # comment filtering and a text return are intended here.
            if line and line[0] != '#':
                return line
    return default_ua
def url_concat(url, args, keep_existing=True):
    """Append query arguments to an URL.

    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'

    :arg string url: URL being concat to.
    :arg dict args: Args being concat.
    :arg bool keep_existing: (Optional) Whether to keep the args which are
        already in url, default is ``True``.
    """
    if not args:
        return url
    if not keep_existing:
        # Re-parse the existing querystring and let *args* override it.
        base, _, query = url.partition('?')
        merged = urlparse.parse_qs(query, True)
        merged.update(args)
        return base + '?' + urlencode(merged, 1)
    # Keep the existing querystring and just append.
    joiner = ''
    if url[-1] not in ('?', '&'):
        joiner = '&' if '?' in url else '?'
    return url + joiner + urlencode(args, 1)
def choose_boundary():
    """Generate a multipart boundary.

    :returns: A boundary string
    """
    global BOUNDARY_PREFIX
    if BOUNDARY_PREFIX is None:
        # Build the static part of the boundary once per process:
        # "urlfetch[.uid][.pid]".
        BOUNDARY_PREFIX = "urlfetch"
        try:
            uid = repr(os.getuid())
            BOUNDARY_PREFIX += "." + uid
        except AttributeError:
            # os.getuid is unavailable on some platforms (e.g. Windows).
            pass
        try:
            pid = repr(os.getpid())
            BOUNDARY_PREFIX += "." + pid
        except AttributeError:
            pass
    # A fresh uuid4 makes every boundary unique per call.
    return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
def encode_multipart(data, files):
    """Encode multipart.

    Build a ``multipart/form-data`` request body from form fields and
    file uploads.

    :arg dict data: Data to be encoded
    :arg dict files: Files to be encoded. Values may be file-like objects,
        strings, or ``(filename, content)`` tuples.
    :returns: A ``(content_type, body_bytes)`` tuple
    :raises: :class:`UrlfetchException`
    """
    body = BytesIO()
    boundary = choose_boundary()
    part_boundary = b('--%s\r\n' % boundary)
    # codecs.lookup(...)[3] is the StreamWriter used to write text parts
    # into the binary buffer as utf-8.
    writer = codecs.lookup('utf-8')[3]
    if isinstance(data, dict):
        for name, values in data.items():
            if not isinstance(values, (list, tuple, set)):
                # behave like urllib.urlencode(dict, 1)
                values = (values, )
            for value in values:
                body.write(part_boundary)
                writer(body).write('Content-Disposition: form-data; '
                                   'name="%s"\r\n' % name)
                body.write(b'Content-Type: text/plain\r\n\r\n')
                if isinstance(value, int):
                    value = str(value)
                if py3k and isinstance(value, str):
                    writer(body).write(value)
                else:
                    body.write(value)
                body.write(b'\r\n')
    for fieldname, f in files.items():
        if isinstance(f, tuple):
            filename, f = f
        elif hasattr(f, 'name'):
            filename = basename(f.name)
        else:
            filename = None
            raise UrlfetchException("file must has filename")
        if hasattr(f, 'read'):
            value = f.read()
        elif isinstance(f, basestring):
            value = f
        else:
            value = str(f)
        body.write(part_boundary)
        if filename:
            writer(body).write('Content-Disposition: form-data; name="%s"; '
                               'filename="%s"\r\n' % (fieldname, filename))
            body.write(b'Content-Type: application/octet-stream\r\n\r\n')
        else:
            # FIX: this branch previously interpolated ``name`` — a leftover
            # loop variable from the data loop (possibly unbound) — instead
            # of the current file's ``fieldname``.
            writer(body).write('Content-Disposition: form-data; name="%s"'
                               '\r\n' % fieldname)
            body.write(b'Content-Type: text/plain\r\n\r\n')
        if py3k and isinstance(value, str):
            writer(body).write(value)
        else:
            body.write(value)
        body.write(b'\r\n')
    body.write(b('--' + boundary + '--\r\n'))
    content_type = 'multipart/form-data; boundary=%s' % boundary
    return content_type, body.getvalue()
##############################################################################
# Constants and Globals #######################################################
##############################################################################
# HTTP verbs accepted by request(); anything else raises UrlfetchException.
ALLOWED_METHODS = ("GET", "DELETE", "HEAD", "OPTIONS", "PUT", "POST", "TRACE",
                   "PATCH")
# Hosts that always bypass the proxy (overridable via $no_proxy).
PROXY_IGNORE_HOSTS = ('127.0.0.1', 'localhost')
# Default proxies picked up from the environment at import time.
PROXIES = get_proxies_from_environ()
# Lazily initialized by choose_boundary() on first multipart encode.
BOUNDARY_PREFIX = None
# Bundled list of User-Agent strings; the first existing candidate wins.
UAFILENAME = 'urlfetch.useragents.list'
UAFILE = next((i for i in set((pathjoin(sys.prefix, UAFILENAME),
                               pathjoin(sys.prefix, 'local', UAFILENAME),
                               pathjoin(dirname(abspath(__file__)), UAFILENAME)))
               if os.path.isfile(i)), None)
|
ifduyue/urlfetch
|
urlfetch.py
|
mb_code
|
python
|
def mb_code(s, coding=None, errors='replace'):
if isinstance(s, unicode):
return s if coding is None else s.encode(coding, errors=errors)
for c in ('utf-8', 'gb2312', 'gbk', 'gb18030', 'big5'):
try:
s = s.decode(c)
return s if coding is None else s.encode(coding, errors=errors)
except:
pass
return unicode(s, errors=errors)
|
encoding/decoding helper.
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L860-L870
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
urlfetch
~~~~~~~~~~
An easy to use HTTP client based on httplib.
:copyright: (c) 2011-2019 by Yue Du.
:license: BSD 2-clause License, see LICENSE for more details.
"""
__version__ = '1.1.2'
__author__ = 'Yue Du <ifduyue@gmail.com>'
__url__ = 'https://github.com/ifduyue/urlfetch'
__license__ = 'BSD 2-Clause License'
import os, sys, base64, codecs, uuid, stat, time, socket
from os.path import basename, dirname, abspath, join as pathjoin
from functools import partial
from io import BytesIO
import re
try:
import simplejson as json
except ImportError:
import json
py3k = sys.version_info >= (3, 0)
support_source_address = (sys.version_info >= (2, 7) and not py3k
or sys.version_info >= (3, 2))
if py3k:
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urlencode
import urllib.parse as urlparse
import http.cookies as Cookie
basestring = (str, bytes)
unicode = str
b = lambda s: s.encode('latin-1')
u = lambda s: s
else:
from httplib import HTTPConnection, HTTPSConnection
from urllib import urlencode
import urlparse
import Cookie
basestring = basestring
unicode = unicode
b = lambda s: s
u = lambda s: unicode(s, 'unicode_escape')
__all__ = ('request', 'fetch', 'Session',
'get', 'head', 'put', 'post', 'delete',
'options', 'trace', 'patch',
'UrlfetchException', 'ContentLimitExceeded', 'URLError',
'ContentDecodingError', 'TooManyRedirects')
# Exception hierarchy: every error raised by urlfetch derives from
# UrlfetchException, so callers can catch the single base class.
class UrlfetchException(IOError):
    "Base exception. All exceptions and errors will subclass from this."
# Raised when the body grows beyond the caller-supplied ``length_limit``.
class ContentLimitExceeded(UrlfetchException):
    "Content length is beyond the limit."
# Bad or unsupported URL; also a ValueError so generic callers catch it.
class URLError(UrlfetchException, ValueError):
    "Error parsing or handling the URL."
# Raised when gzip/deflate/json decoding of the response body fails.
class ContentDecodingError(UrlfetchException):
    "Failed to decode the content."
# Raised when a redirect chain exceeds ``max_redirects``.
class TooManyRedirects(UrlfetchException):
    """Too many redirects."""
# Raised when the underlying socket operation times out.
class Timeout(UrlfetchException):
    """Request timed out."""
class cached_property(object):
    """Cached property.

    A property that is only computed once per instance and then replaces
    itself with an ordinary attribute. Deleting the attribute resets the
    property.
    """
    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        # fget/fset/fdel mirror the built-in property() protocol.
        self.__get = fget
        self.__set = fset
        self.__del = fdel
        # Mimic the wrapped function so introspection still works.
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__
    def __get__(self, instance, owner):
        if instance is None:
            # attribute is accessed through the owner class
            return self
        try:
            # Cache hit: the computed value lives in the instance dict.
            return instance.__dict__[self.__name__]
        except KeyError:
            # Cache miss: compute once and store for subsequent accesses.
            value = instance.__dict__[self.__name__] = self.__get(instance)
            return value
    def __set__(self, instance, value):
        if instance is None:
            return self
        if self.__set is not None:
            # Let a custom setter transform the value before caching it.
            value = self.__set(instance, value)
        instance.__dict__[self.__name__] = value
    def __delete__(self, instance):
        if instance is None:
            return self
        try:
            # Dropping the cached value resets the property.
            value = instance.__dict__.pop(self.__name__)
        except KeyError:
            pass
        else:
            if self.__del is not None:
                self.__del(instance, value)
    def setter(self, fset):
        # Return a new descriptor with the setter replaced (property-style).
        return self.__class__(self.__get, fset, self.__del)
    def deleter(self, fdel):
        # Return a new descriptor with the deleter replaced (property-style).
        return self.__class__(self.__get, self.__set, fdel)
##############################################################################
# Core Methods and Classes ####################################################
##############################################################################
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
    """Read up to *chunk_size* bytes of the raw (still compressed) body.

    Useful for streaming and large files.

    :arg int chunk_size: size of chunk, default is 65536.
    """
    return self._r.read(chunk_size)
def __iter__(self):
    """The response is its own chunk iterator (see :meth:`__next__`)."""
    return self

def __next__(self):
    """Return the next chunk of the body, transparently decompressed.

    gzip- and deflate-encoded bodies are inflated on the fly; any other
    declared ``Content-Encoding`` is rejected.

    :raises: :class:`ContentDecodingError` on bad or unknown encodings.
    """
    chunk = self.read()
    if not chunk:
        if self._decoder:
            # End of stream: emit whatever is still buffered in the
            # decompressor, then drop it so the next call stops.
            chunk = self._decoder.flush()
            self._decoder = None
            return chunk
        else:
            raise StopIteration
    else:
        ce = self._content_encoding
        if ce in ('gzip', 'deflate'):
            if not self._decoder:
                # NOTE(review): zlib is bound as a local only on the first
                # chunk; a zlib.error raised on a later chunk would surface
                # as NameError in the except clause below — consider a
                # module-level import.
                import zlib
                if ce == 'gzip':
                    # wbits = 16 + MAX_WBITS -> expect a gzip header.
                    self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
                else:
                    self._decoder = zlib.decompressobj()
            try:
                return self._decoder.decompress(chunk)
            except zlib.error:
                # Some servers send raw deflate data without the zlib
                # wrapper; retry once with a raw-deflate decompressor.
                self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
                try:
                    return self._decoder.decompress(chunk)
                except (IOError, zlib.error) as e:
                    self.close()
                    raise ContentDecodingError(e)
        if ce:
            self.close()
            raise ContentDecodingError('Unknown encoding: %s' % ce)
        return chunk

# Python 2 iterator protocol compatibility.
next = __next__
def __enter__(self):
    """Support ``with response:`` — returns the response itself."""
    return self

def __exit__(self, exc_type, exc_val, exc_tb):
    # Always close; returning False lets any exception propagate.
    self.close()
    return False

@classmethod
def from_httplib(cls, connection, **kwargs):
    """Make an :class:`~urlfetch.Response` object from a httplib response
    object."""
    return cls(connection, **kwargs)
@cached_property
def body(self):
    """Response body as bytes.

    Reads (and, via iteration, decompresses) the whole body into memory
    on first access; ``cached_property`` memoises the result.

    :raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
    """
    content = []
    length = 0
    # Iterating over self yields decoded chunks (see __next__).
    for chunk in self:
        content.append(chunk)
        length += len(chunk)
        # Enforce the limit on the decoded size as well, since the
        # Content-Length check in __init__ only saw the compressed size.
        if self.length_limit and length > self.length_limit:
            self.close()
            raise ContentLimitExceeded("Content length is more than %d "
                                       "bytes" % self.length_limit)
    return b("").join(content)

# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
    return self.body
@cached_property
def text(self):
    """Response body decoded to unicode.

    The charset declared in the ``Content-Type`` header is tried first,
    then UTF-8, and finally latin-1 (which cannot fail), so a best-effort
    string is always returned.
    """
    # BUGFIX: the original called mb_code(), which is not defined anywhere
    # in this module and raised NameError; decode inline instead.
    content = self.content
    ctype = self.getheader('content-type') or ''
    charset = None
    # e.g. "text/html; charset=utf-8" -> "utf-8"
    for param in ctype.split(';')[1:]:
        key, _, value = param.partition('=')
        if key.strip().lower() == 'charset':
            charset = value.strip().strip('\'"')
            break
    for encoding in filter(None, (charset, 'utf-8')):
        try:
            return content.decode(encoding)
        except (LookupError, UnicodeDecodeError):
            continue
    return content.decode('latin-1')
@cached_property
def json(self):
    """Load response body as json.

    :raises: :class:`ContentDecodingError`
    """
    try:
        return json.loads(self.text)
    except Exception as e:
        # Normalise any parsing/decoding failure to one exception type.
        raise ContentDecodingError(e)
@cached_property
def headers(self):
    """Response headers as a dict.

    All header names are lower case, e.g.::

        >>> response.headers['content-type']
        'text/html'
    """
    pairs = self.getheaders()
    if py3k:
        # Python 3's getheaders() preserves the server's casing;
        # lower-case the names for predictable lookups.
        return dict((name.lower(), value) for name, value in pairs)
    return dict(pairs)
@cached_property
def cookies(self):
    """Cookies set by the server, as a ``{name: value}`` dict."""
    c = Cookie.SimpleCookie(self.getheader('set-cookie'))
    return dict((i.key, i.value) for i in c.values())

@cached_property
def cookiestring(self):
    """Cookies joined into a single ``name=value; name=value`` string."""
    return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def links(self):
    """Parse the HTTP ``Link`` header into a list of dicts.

    Each dict has at least a ``url`` key; link parameters such as
    ``rel`` become additional keys.
    """
    header = self.getheader('link')
    if not header:
        return []
    parsed = []
    for entry in header.split(','):
        # "<url>; rel=next" -> target + raw parameter string.
        target, _, raw_params = entry.partition(';')
        link = {'url': target.strip('''<> '"''')}
        for raw in raw_params.split(';'):
            pieces = raw.split('=')
            if len(pieces) != 2:
                # Malformed parameter: stop parsing this entry.
                break
            key, value = pieces
            link[key.strip(''' '"''')] = value.strip(''' '"''')
        parsed.append(link)
    return parsed
def close(self):
    """Close the underlying connection."""
    self._r.close()

def __del__(self):
    # Best effort: make sure the connection is released on GC.
    self.close()
class Session(object):
    """A session object.

    :class:`urlfetch.Session` can hold common headers and cookies.
    Every request issued by a :class:`urlfetch.Session` object will bring up
    these headers and cookies.

    :class:`urlfetch.Session` plays a role in handling cookies, just like a
    cookiejar.

    :arg dict headers: Init headers.
    :arg dict cookies: Init cookies.
    :arg tuple auth: (username, password) for basic authentication.
    """

    def __init__(self, headers={}, cookies={}, auth=None):
        """Init a :class:`~urlfetch.Session` object"""
        #: headers sent with every request of this session
        self.headers = headers.copy()
        #: cookies sent with every request of this session
        self.cookies = cookies.copy()
        # Pre-compute the Basic auth header once for the whole session.
        if auth and isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
            auth = base64.b64encode(auth.encode('utf-8'))
            self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')

    def putheader(self, header, value):
        """Add an header to default headers."""
        self.headers[header] = value

    def popheader(self, header):
        """Remove an header from default headers."""
        return self.headers.pop(header)

    def putcookie(self, key, value=""):
        """Add an cookie to default cookies."""
        self.cookies[key] = value

    def popcookie(self, key):
        """Remove an cookie from default cookies."""
        return self.cookies.pop(key)

    @property
    def cookiestring(self):
        """Cookie string.

        It's assignable, and will change :attr:`~.Session.cookies`
        correspondingly.

        >>> s = Session()
        >>> s.cookiestring = 'foo=bar; 1=2'
        >>> s.cookies
        {'1': '2', 'foo': 'bar'}
        """
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())

    @cookiestring.setter
    def cookiestring(self, value):
        """Cookie string setter."""
        c = Cookie.SimpleCookie(value)
        sc = [(i.key, i.value) for i in c.values()]
        self.cookies = dict(sc)

    def snapshot(self):
        """Return a copy of the session state (headers and cookies)."""
        # Shallow copies so later mutation of the session does not
        # affect the snapshot (and vice versa).
        session = {
            'headers': self.headers.copy(),
            'cookies': self.cookies.copy()
        }
        return session

    def request(self, *args, **kwargs):
        """Issue a request."""
        headers = self.headers.copy()
        if self.cookiestring:
            headers['Cookie'] = self.cookiestring
        # Per-call headers win over session defaults.
        headers.update(kwargs.get('headers', {}))
        kwargs['headers'] = headers

        r = request(*args, **kwargs)
        # Remember cookies the server set, for subsequent requests.
        self.cookies.update(r.cookies)
        return r

    def fetch(self, *args, **kwargs):
        """Fetch an URL"""
        data = kwargs.get('data', None)
        files = kwargs.get('files', {})
        # A body (string/dict data or files) implies POST; otherwise GET.
        if data and isinstance(data, (basestring, dict)) or files:
            return self.post(*args, **kwargs)
        return self.get(*args, **kwargs)

    def get(self, *args, **kwargs):
        """Issue a get request."""
        kwargs['method'] = 'GET'
        return self.request(*args, **kwargs)

    def post(self, *args, **kwargs):
        """Issue a post request."""
        kwargs['method'] = 'POST'
        return self.request(*args, **kwargs)

    def put(self, *args, **kwargs):
        """Issue a put request."""
        kwargs['method'] = 'PUT'
        return self.request(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """Issue a delete request."""
        kwargs['method'] = 'DELETE'
        return self.request(*args, **kwargs)

    def head(self, *args, **kwargs):
        """Issue a head request."""
        kwargs['method'] = 'HEAD'
        return self.request(*args, **kwargs)

    def options(self, *args, **kwargs):
        """Issue a options request."""
        kwargs['method'] = 'OPTIONS'
        return self.request(*args, **kwargs)

    def trace(self, *args, **kwargs):
        """Issue a trace request."""
        kwargs['method'] = 'TRACE'
        return self.request(*args, **kwargs)

    def patch(self, *args, **kwargs):
        """Issue a patch request."""
        kwargs['method'] = 'PATCH'
        return self.request(*args, **kwargs)
def fetch(*args, **kwargs):
    """fetch an URL.

    :func:`~urlfetch.fetch` is a wrapper of :func:`~urlfetch.request`.
    It calls :func:`~urlfetch.get` by default. If one of parameter ``data``
    or parameter ``files`` is supplied, :func:`~urlfetch.post` is called.
    """
    data = kwargs.get('data', None)
    files = kwargs.get('files', {})
    # A body (string/dict data or any files) implies POST; otherwise GET.
    if data and isinstance(data, (basestring, dict)) or files:
        return post(*args, **kwargs)
    return get(*args, **kwargs)
def match_no_proxy(host, no_proxy):
    """Decide whether *host* matches a single ``no_proxy`` entry.

    Two forms of entry are supported:

    * An IPv4 address, optionally with a CIDR suffix (``10.0.0.0/8``),
      compared bit-wise against *host* when *host* is also an IPv4 address.
    * A domain suffix (``example.com``), matched with ``str.endswith``.

    :arg string host: Host name or IPv4 address of the request.
    :arg string no_proxy: One entry of the no_proxy list.
    :returns: ``True`` if the entry matches (proxy must be bypassed).
    """
    # BUGFIX: dots are now escaped so '.' matches only a literal dot; the
    # original patterns used bare '.', which matches any character.
    ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})"
    no_proxy_ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})(?:/(\d+))?"
    ip_match = re.match(ip_regex, host)
    no_proxy_ip_match = re.match(no_proxy_ip_regex, no_proxy)
    if no_proxy_ip_match and ip_match:
        # Compare the two addresses as 32-bit binary strings.
        host_bits = "".join("{:08b}".format(int(part))
                            for part in ip_match.group(1, 2, 3, 4))
        no_proxy_bits = "".join("{:08b}".format(int(part))
                                for part in no_proxy_ip_match.group(1, 2, 3, 4))
        prefix_len = no_proxy_ip_match.group(5)
        if prefix_len is not None:
            # CIDR form: only the leading prefix bits must agree.
            bit_match_count = int(prefix_len)
            return host_bits[:bit_match_count] == no_proxy_bits[:bit_match_count]
        return host_bits == no_proxy_bits
    # Fall back to suffix matching for domain names.
    return host.endswith(no_proxy)
def request(url, method="GET", params=None, data=None, headers={},
            timeout=None, files={}, randua=False, auth=None, length_limit=None,
            proxies=None, trust_env=True, max_redirects=0,
            source_address=None, **kwargs):
    """request an URL

    :arg string url: URL to be fetched.
    :arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
                        ``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,
                        ``PATCH``. ``GET`` is the default.
    :arg dict/string params: (optional) Dict or string to attach to url as
                             querystring.
    :arg dict headers: (optional) HTTP request headers.
    :arg float timeout: (optional) Timeout in seconds
    :arg files: (optional) Files to be sended
    :arg randua: (optional) If ``True`` or ``path string``, use a random
                 user-agent in headers, instead of
                 ``'urlfetch/' + __version__``
    :arg tuple auth: (optional) (username, password) for basic authentication
    :arg int length_limit: (optional) If ``None``, no limits on content length,
                           if the limit reached raised exception 'Content length
                           is more than ...'
    :arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',
                       'https': '127.0.0.1:563'}
    :arg bool trust_env: (optional) If ``True``, urlfetch will get infomations
                         from env, such as HTTP_PROXY, HTTPS_PROXY
    :arg int max_redirects: (integer, optional) Max redirects allowed within a
                            request. Default is 0, which means redirects are
                            not allowed.
    :arg tuple source_address: (optional) A tuple of (host, port) to
                               specify the source_address to bind to. This
                               argument is ignored if you're using Python prior
                               to 2.7/3.2.
    :returns: A :class:`~urlfetch.Response` object
    :raises: :class:`URLError`, :class:`UrlfetchException`,
             :class:`TooManyRedirects`,
    """
    def make_connection(conn_type, host, port, timeout, source_address):
        """Return an HTTP or HTTPS connection for the given endpoint."""
        if support_source_address:
            kwargs = {'timeout': timeout, 'source_address': source_address}
        else:
            kwargs = {'timeout': timeout}
            if source_address is not None:
                # BUGFIX: the two string literals were concatenated without
                # a separating space in the original message.
                raise UrlfetchException('source_address requires '
                                        'Python 2.7/3.2 or newer versions')
        if conn_type == 'http':
            conn = HTTPConnection(host, port, **kwargs)
        elif conn_type == 'https':
            conn = HTTPSConnection(host, port, **kwargs)
        else:
            raise URLError('Unknown Connection Type: %s' % conn_type)
        return conn

    via_proxy = False

    method = method.upper()
    if method not in ALLOWED_METHODS:
        raise UrlfetchException("Method should be one of " +
                                ", ".join(ALLOWED_METHODS))

    # Attach the querystring, either from a dict or a raw string.
    if params:
        if isinstance(params, dict):
            url = url_concat(url, params)
        elif isinstance(params, basestring):
            if url[-1] not in ('?', '&'):
                url += '&' if ('?' in url) else '?'
            url += params

    parsed_url = parse_url(url)

    # Default request headers; user-supplied ``headers`` win (merged below).
    reqheaders = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate, compress, identity, *',
        'User-Agent': random_useragent(randua),
        'Host': parsed_url['http_host']
    }

    # Proxy support
    scheme = parsed_url['scheme']
    if proxies is None and trust_env:
        proxies = PROXIES
    if proxies is None:
        # BUGFIX: with trust_env=False and no explicit proxies the original
        # crashed on ``None.get`` below.
        proxies = {}
    ignore_hosts = PROXY_IGNORE_HOSTS
    if trust_env:
        no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')
        if no_proxy:
            ignore_hosts = no_proxy.split(",")
    proxy = proxies.get(scheme)
    if proxy and not any(match_no_proxy(parsed_url['host'], host)
                         for host in ignore_hosts):
        via_proxy = True
        if '://' not in proxy:
            proxy = '%s://%s' % (scheme, proxy)
        parsed_proxy = parse_url(proxy)
        # Proxy-Authorization
        if parsed_proxy['username'] and parsed_proxy['password']:
            proxyauth = '%s:%s' % (parsed_proxy['username'],
                                   parsed_proxy['password'])
            proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
            reqheaders['Proxy-Authorization'] = 'Basic ' + \
                proxyauth.decode('utf-8')
        conn = make_connection(scheme, parsed_proxy['host'],
                               parsed_proxy['port'], timeout, source_address)
    else:
        conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],
                               timeout, source_address)

    # Credentials embedded in the URL are used unless ``auth`` is given.
    if not auth and parsed_url['username'] and parsed_url['password']:
        auth = (parsed_url['username'], parsed_url['password'])
    if auth:
        if isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
        auth = base64.b64encode(auth.encode('utf-8'))
        reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')

    if files:
        content_type, data = encode_multipart(data, files)
        reqheaders['Content-Type'] = content_type
    elif isinstance(data, dict):
        data = urlencode(data, 1)

    if isinstance(data, basestring) and not files:
        # httplib will set 'Content-Length', also you can set it by yourself
        reqheaders["Content-Type"] = "application/x-www-form-urlencoded"
    # what if the method is GET, HEAD or DELETE
    # just do not make so much decisions for users
    reqheaders.update(headers)

    start_time = time.time()
    try:
        # Through a proxy the full URL must be sent; otherwise just the URI.
        request_url = url if via_proxy else parsed_url['uri']
        conn.request(method, request_url, data, reqheaders)
        resp = conn.getresponse()
    except socket.timeout as e:
        raise Timeout(e)
    except Exception as e:
        raise UrlfetchException(e)
    end_time = time.time()
    total_time = end_time - start_time

    history = []
    response = Response.from_httplib(resp, reqheaders=reqheaders,
                                     length_limit=length_limit,
                                     history=history[:], url=url,
                                     total_time=total_time,
                                     start_time=start_time)

    while (response.status in (301, 302, 303, 307) and
            'location' in response.headers and max_redirects):
        # Drain and close the current response before following the redirect.
        response.body
        response.close()
        history.append(response)

        if len(history) > max_redirects:
            raise TooManyRedirects('max_redirects exceeded')

        # Only 307 preserves the original method; others become GET.
        method = method if response.status == 307 else 'GET'
        location = response.headers['location']
        if location[:2] == '//':
            # Protocol-relative redirect target.
            url = parsed_url['scheme'] + ':' + location
        else:
            url = urlparse.urljoin(url, location)
        parsed_url = parse_url(url)
        reqheaders['Host'] = parsed_url['http_host']
        reqheaders['Referer'] = response.url

        # Proxy
        scheme = parsed_url['scheme']
        proxy = proxies.get(scheme)
        if proxy and parsed_url['host'] not in PROXY_IGNORE_HOSTS:
            via_proxy = True
            if '://' not in proxy:
                proxy = '%s://%s' % (parsed_url['scheme'], proxy)
            parsed_proxy = parse_url(proxy)
            # Proxy-Authorization
            if parsed_proxy['username'] and parsed_proxy['password']:
                # BUGFIX: the original built "username:username" here,
                # dropping the proxy password on redirects.
                proxyauth = '%s:%s' % (parsed_proxy['username'],
                                       parsed_proxy['password'])
                proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
                reqheaders['Proxy-Authorization'] = 'Basic ' + \
                    proxyauth.decode('utf-8')
            conn = make_connection(scheme, parsed_proxy['host'],
                                   parsed_proxy['port'], timeout,
                                   source_address)
        else:
            via_proxy = False
            reqheaders.pop('Proxy-Authorization', None)
            conn = make_connection(scheme, parsed_url['host'],
                                   parsed_url['port'], timeout,
                                   source_address)
        try:
            request_url = url if via_proxy else parsed_url['uri']
            conn.request(method, request_url, data, reqheaders)
            resp = conn.getresponse()
        except socket.timeout as e:
            raise Timeout(e)
        except Exception as e:
            raise UrlfetchException(e)
        response = Response.from_httplib(resp, reqheaders=reqheaders,
                                         length_limit=length_limit,
                                         history=history[:], url=url,
                                         total_time=total_time,
                                         start_time=start_time)

    return response
##############################################################################
# Shortcuts and Helpers #######################################################
##############################################################################
def _partial_method(method):
    """Build a module-level shortcut (e.g. ``get``/``post``) that calls
    :func:`request` with *method* pre-bound."""
    func = partial(request, method=method)
    func.__doc__ = 'Issue a %s request' % method.lower()
    func.__name__ = method.lower()
    func.__module__ = request.__module__
    return func

# One convenience wrapper per supported HTTP verb.
get = _partial_method("GET")
post = _partial_method("POST")
put = _partial_method("PUT")
delete = _partial_method("DELETE")
head = _partial_method("HEAD")
options = _partial_method("OPTIONS")
trace = _partial_method("TRACE")
patch = _partial_method("PATCH")

# The factory is internal only; drop it from the module namespace.
del _partial_method
class ObjectDict(dict):
    """A dict whose keys are also reachable as attributes."""

    def __getattr__(self, name):
        # Only called for names not found the normal way; map missing
        # keys to AttributeError so hasattr()/getattr() behave sanely.
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value
def parse_url(url):
    """Return a dictionary of parsed url

    Including scheme, netloc, path, params, query, fragment, uri, username,
    password, host, port and http_host
    """
    try:
        url = unicode(url)
    except UnicodeDecodeError:
        # Leave non-decodable byte strings as-is (Python 2 only path).
        pass

    if py3k:
        make_utf8 = lambda x: x
    else:
        # Python 2: normalise unicode components back to utf-8 byte strings.
        make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x

    # Split off the scheme ourselves, then re-prefix with http:// so that
    # urlsplit always sees a netloc regardless of the original scheme.
    if '://' in url:
        scheme, url = url.split('://', 1)
    else:
        scheme = 'http'
    url = 'http://' + url
    parsed = urlparse.urlsplit(url)

    r = ObjectDict()
    r['scheme'] = make_utf8(scheme)
    r['netloc'] = make_utf8(parsed.netloc)
    r['path'] = make_utf8(parsed.path)
    r['query'] = make_utf8(parsed.query)
    r['fragment'] = make_utf8(parsed.fragment)
    # uri = path plus querystring; this is what gets sent to httplib.
    r['uri'] = make_utf8(parsed.path)
    if parsed.query:
        r['uri'] += '?' + make_utf8(parsed.query)
    r['username'] = make_utf8(parsed.username)
    r['password'] = make_utf8(parsed.password)
    # IDNA-encode internationalised hostnames (e.g. xn--... punycode).
    host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
    r['host'] = r['hostname'] = host
    try:
        r['port'] = parsed.port
    except ValueError:
        # Non-numeric port in the URL.
        r['port'] = None
    # http_host is the value used for the Host request header.
    if r['port']:
        r['http_host'] = '%s:%d' % (r['host'], r['port'])
    else:
        r['http_host'] = r['host']
    return r
def get_proxies_from_environ():
    """Collect HTTP/HTTPS proxy settings from environment variables.

    Both lower-case (``http_proxy``) and upper-case (``HTTP_PROXY``)
    variable names are honoured, lower-case taking precedence.
    """
    proxies = {}
    for scheme in ('http', 'https'):
        value = os.getenv(scheme + '_proxy') or os.getenv(scheme.upper() + '_PROXY')
        if value:
            proxies[scheme] = value
    return proxies
def random_useragent(filename=True):
    """Returns a User-Agent string randomly from file.

    :arg string filename: (Optional) Path to the file from which a random
        useragent is generated. By default it's ``True``, a file shipped
        with this module will be used.
    :returns: An user-agent string.
    """
    import random
    default_ua = 'urlfetch/%s' % __version__
    if isinstance(filename, basestring):
        filenames = [filename]
    else:
        filenames = []
    if filename and UAFILE:
        filenames.append(UAFILE)

    # Pick the first candidate that is a readable regular file.
    for filename in filenames:
        try:
            st = os.stat(filename)
            if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
                break
        except OSError:
            pass
    else:
        return default_ua

    with open(filename, 'rb') as f:
        filesize = st.st_size
        pos = 0
        r = random.Random()
        # try getting a valid line for no more than 3 times
        for i in range(3):
            pos += r.randint(0, filesize)
            pos %= filesize
            f.seek(pos)
            # in case we are in middle of a line
            f.readline()
            line = f.readline()
            if not line:
                if f.tell() == filesize:
                    # end of file: wrap around and take the first line
                    f.seek(0)
                    line = f.readline()
            line = line.strip()
            # BUGFIX: the file is opened in binary mode, so under Python 3
            # ``line[0]`` is an int and the old ``line[0] != '#'`` test was
            # always true, letting comment lines through.  Compare a
            # one-byte slice instead, and decode so callers always get text.
            if line and line[:1] != b'#':
                if not isinstance(line, str):
                    line = line.decode('utf-8', 'replace')
                return line
    return default_ua
def url_concat(url, args, keep_existing=True):
    """Concatenate url and argument dictionary

    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'

    :arg string url: URL being concat to.
    :arg dict args: Args being concat.
    :arg bool keep_existing: (Optional) Whether to keep the args which are
        alreay in url, default is ``True``.
    """
    if not args:
        return url
    if not keep_existing:
        # Re-parse the existing querystring and let *args* override it.
        base, _, raw_query = url.partition('?')
        merged = urlparse.parse_qs(raw_query, True)
        merged.update(args)
        return base + '?' + urlencode(merged, 1)
    # Keep what is there; just pick the right separator to append with.
    separator = '' if url[-1] in ('?', '&') else ('&' if '?' in url else '?')
    return url + separator + urlencode(args, 1)
def choose_boundary():
    """Generate a multipart boundary.

    The prefix ("urlfetch[.uid][.pid]") is computed once per process and
    cached in the module-level ``BOUNDARY_PREFIX``; a fresh UUID suffix
    makes each returned boundary unique.

    :returns: A boundary string
    """
    global BOUNDARY_PREFIX
    if BOUNDARY_PREFIX is None:
        BOUNDARY_PREFIX = "urlfetch"
        try:
            uid = repr(os.getuid())
            BOUNDARY_PREFIX += "." + uid
        except AttributeError:
            # os.getuid does not exist on this platform (e.g. Windows).
            pass
        try:
            pid = repr(os.getpid())
            BOUNDARY_PREFIX += "." + pid
        except AttributeError:
            pass
    return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
def encode_multipart(data, files):
    """Encode multipart.

    :arg dict data: Data to be encoded
    :arg dict files: Files to be encoded; values may be file-like objects
        or ``(filename, content)`` tuples.
    :returns: Tuple of (content type, encoded binary string)
    :raises: :class:`UrlfetchException`
    """
    body = BytesIO()
    boundary = choose_boundary()
    part_boundary = b('--%s\r\n' % boundary)
    # StreamWriter for writing unicode text into the binary buffer.
    writer = codecs.lookup('utf-8')[3]

    if isinstance(data, dict):
        for name, values in data.items():
            if not isinstance(values, (list, tuple, set)):
                # behave like urllib.urlencode(dict, 1)
                values = (values, )
            for value in values:
                body.write(part_boundary)
                writer(body).write('Content-Disposition: form-data; '
                                   'name="%s"\r\n' % name)
                body.write(b'Content-Type: text/plain\r\n\r\n')
                if isinstance(value, int):
                    value = str(value)
                if py3k and isinstance(value, str):
                    writer(body).write(value)
                else:
                    body.write(value)
                body.write(b'\r\n')

    for fieldname, f in files.items():
        if isinstance(f, tuple):
            filename, f = f
        elif hasattr(f, 'name'):
            filename = basename(f.name)
        else:
            raise UrlfetchException("file must has filename")

        if hasattr(f, 'read'):
            value = f.read()
        elif isinstance(f, basestring):
            value = f
        else:
            value = str(f)

        body.write(part_boundary)
        if filename:
            writer(body).write('Content-Disposition: form-data; name="%s"; '
                               'filename="%s"\r\n' % (fieldname, filename))
            body.write(b'Content-Type: application/octet-stream\r\n\r\n')
        else:
            # BUGFIX: this branch referenced the undefined variable ``name``
            # (a leftover from the data loop above), raising NameError for
            # files supplied as ('', content); use ``fieldname``.
            writer(body).write('Content-Disposition: form-data; name="%s"'
                               '\r\n' % fieldname)
            body.write(b'Content-Type: text/plain\r\n\r\n')
        if py3k and isinstance(value, str):
            writer(body).write(value)
        else:
            body.write(value)
        body.write(b'\r\n')

    body.write(b('--' + boundary + '--\r\n'))
    content_type = 'multipart/form-data; boundary=%s' % boundary
    return content_type, body.getvalue()
##############################################################################
# Constants and Globals #######################################################
##############################################################################

# HTTP methods accepted by :func:`request`.
ALLOWED_METHODS = ("GET", "DELETE", "HEAD", "OPTIONS", "PUT", "POST", "TRACE",
                   "PATCH")

# Hosts never reached through a proxy (overridable via $no_proxy).
PROXY_IGNORE_HOSTS = ('127.0.0.1', 'localhost')

# Proxies picked up from the environment once, at import time.
PROXIES = get_proxies_from_environ()

# Lazily-initialised prefix used by choose_boundary().
BOUNDARY_PREFIX = None

# Bundled User-Agent list; the first existing candidate path wins.
UAFILENAME = 'urlfetch.useragents.list'
UAFILE = next((i for i in set((pathjoin(sys.prefix, UAFILENAME),
                               pathjoin(sys.prefix, 'local', UAFILENAME),
                               pathjoin(dirname(abspath(__file__)), UAFILENAME)))
               if os.path.isfile(i)), None)
|
ifduyue/urlfetch
|
urlfetch.py
|
random_useragent
|
python
|
def random_useragent(filename=True):
    """Returns a User-Agent string randomly from file.

    :arg string filename: (Optional) Path to the file from which a random
        useragent is generated. By default it's ``True``, a file shipped
        with this module will be used.
    :returns: An user-agent string.
    """
    import random
    default_ua = 'urlfetch/%s' % __version__
    if isinstance(filename, basestring):
        filenames = [filename]
    else:
        filenames = []
    if filename and UAFILE:
        filenames.append(UAFILE)

    # Pick the first candidate that is a readable regular file.
    for filename in filenames:
        try:
            st = os.stat(filename)
            if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
                break
        except OSError:
            pass
    else:
        return default_ua

    with open(filename, 'rb') as f:
        filesize = st.st_size
        pos = 0
        r = random.Random()
        # try getting a valid line for no more than 3 times
        for i in range(3):
            pos += r.randint(0, filesize)
            pos %= filesize
            f.seek(pos)
            # in case we are in middle of a line
            f.readline()
            line = f.readline()
            if not line:
                if f.tell() == filesize:
                    # end of file: wrap around and take the first line
                    f.seek(0)
                    line = f.readline()
            line = line.strip()
            # BUGFIX: the file is opened in binary mode, so under Python 3
            # ``line[0]`` is an int and the old ``line[0] != '#'`` test was
            # always true, letting comment lines through.  Compare a
            # one-byte slice instead, and decode so callers always get text.
            if line and line[:1] != b'#':
                if not isinstance(line, str):
                    line = line.decode('utf-8', 'replace')
                return line
    return default_ua
|
Returns a User-Agent string randomly from file.
:arg string filename: (Optional) Path to the file from which a random
useragent is generated. By default it's ``True``, a file shipped
with this module will be used.
:returns: An user-agent string.
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L873-L929
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
urlfetch
~~~~~~~~~~
An easy to use HTTP client based on httplib.
:copyright: (c) 2011-2019 by Yue Du.
:license: BSD 2-clause License, see LICENSE for more details.
"""
__version__ = '1.1.2'
__author__ = 'Yue Du <ifduyue@gmail.com>'
__url__ = 'https://github.com/ifduyue/urlfetch'
__license__ = 'BSD 2-Clause License'
import os, sys, base64, codecs, uuid, stat, time, socket
from os.path import basename, dirname, abspath, join as pathjoin
from functools import partial
from io import BytesIO
import re
try:
import simplejson as json
except ImportError:
import json
py3k = sys.version_info >= (3, 0)
support_source_address = (sys.version_info >= (2, 7) and not py3k
or sys.version_info >= (3, 2))
if py3k:
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urlencode
import urllib.parse as urlparse
import http.cookies as Cookie
basestring = (str, bytes)
unicode = str
b = lambda s: s.encode('latin-1')
u = lambda s: s
else:
from httplib import HTTPConnection, HTTPSConnection
from urllib import urlencode
import urlparse
import Cookie
basestring = basestring
unicode = unicode
b = lambda s: s
u = lambda s: unicode(s, 'unicode_escape')
__all__ = ('request', 'fetch', 'Session',
'get', 'head', 'put', 'post', 'delete',
'options', 'trace', 'patch',
'UrlfetchException', 'ContentLimitExceeded', 'URLError',
'ContentDecodingError', 'TooManyRedirects')
class UrlfetchException(IOError):
    """Base exception. All exceptions and errors will subclass from this."""

class ContentLimitExceeded(UrlfetchException):
    """Content length is beyond the limit."""

class URLError(UrlfetchException, ValueError):
    """Error parsing or handling the URL."""

class ContentDecodingError(UrlfetchException):
    """Failed to decode the content."""

class TooManyRedirects(UrlfetchException):
    """Too many redirects."""

class Timeout(UrlfetchException):
    """Request timed out."""
class cached_property(object):
    """Cached property.

    A property that is only computed once per instance; the value is
    memoised in the instance ``__dict__``. Deleting the attribute resets
    the property.
    """

    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        # Double-underscore names are mangled (to _cached_property__get
        # etc.), keeping them from clashing with owner-class attributes.
        self.__get = fget
        self.__set = fset
        self.__del = fdel
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__

    def __get__(self, instance, owner):
        if instance is None:
            # attribute is accessed through the owner class
            return self
        try:
            # Defining __set__ makes this a data descriptor, so __get__
            # runs on every access; the cache lives in instance.__dict__.
            return instance.__dict__[self.__name__]
        except KeyError:
            # First access: compute, memoise on the instance, return.
            value = instance.__dict__[self.__name__] = self.__get(instance)
            return value

    def __set__(self, instance, value):
        if instance is None:
            return self
        # An optional setter may transform the value before caching it.
        if self.__set is not None:
            value = self.__set(instance, value)
        instance.__dict__[self.__name__] = value

    def __delete__(self, instance):
        if instance is None:
            return self
        try:
            value = instance.__dict__.pop(self.__name__)
        except KeyError:
            # Nothing cached yet; deleting is a no-op.
            pass
        else:
            if self.__del is not None:
                self.__del(instance, value)

    def setter(self, fset):
        """Return a copy of this property with *fset* as its setter."""
        return self.__class__(self.__get, fset, self.__del)

    def deleter(self, fdel):
        """Return a copy of this property with *fdel* as its deleter."""
        return self.__class__(self.__get, self.__set, fdel)
##############################################################################
# Core Methods and Classes ####################################################
##############################################################################
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
def body(self):
"""Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
"""
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
def json(self):
"""Load response body as json.
:raises: :class:`ContentDecodingError`
"""
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
@cached_property
def headers(self):
"""Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
"""
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
@cached_property
def cookies(self):
"""Cookies in dict"""
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
@cached_property
def cookiestring(self):
"""Cookie string"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def links(self):
"""Links parsed from HTTP Link header"""
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
def close(self):
    """Close the connection."""
    self._r.close()
def __del__(self):
    """Finalizer: close the underlying connection, ignoring errors.

    ``__del__`` can run on a partially constructed instance (e.g. when
    ``__init__`` raised before ``self._r`` was assigned) and during
    interpreter shutdown, so failures here must never propagate.
    """
    try:
        self.close()
    except Exception:
        pass
class Session(object):
    """A session object.

    :class:`urlfetch.Session` can hold common headers and cookies.
    Every request issued by a :class:`urlfetch.Session` object will bring up
    these headers and cookies.

    :class:`urlfetch.Session` plays a role in handling cookies, just like a
    cookiejar.

    :arg dict headers: Init headers.
    :arg dict cookies: Init cookies.
    :arg tuple auth: (username, password) for basic authentication.
    """

    def __init__(self, headers={}, cookies={}, auth=None):
        """Init a :class:`~urlfetch.Session` object"""
        #: headers sent with every request of this session
        self.headers = headers.copy()
        #: cookies sent with every request of this session
        self.cookies = cookies.copy()

        # Pre-compute the Basic auth header once for the whole session.
        if auth and isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
            auth = base64.b64encode(auth.encode('utf-8'))
            self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')

    def putheader(self, header, value):
        """Add a header to default headers."""
        self.headers[header] = value

    def popheader(self, header):
        """Remove a header from default headers."""
        return self.headers.pop(header)

    def putcookie(self, key, value=""):
        """Add a cookie to default cookies."""
        self.cookies[key] = value

    def popcookie(self, key):
        """Remove a cookie from default cookies."""
        return self.cookies.pop(key)

    @property
    def cookiestring(self):
        """Cookie string.

        It's assignable, and assigning to it will change
        :attr:`~.Session.cookies` correspondingly.

        >>> s = Session()
        >>> s.cookiestring = 'foo=bar; 1=2'
        >>> s.cookies
        {'1': '2', 'foo': 'bar'}
        """
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())

    @cookiestring.setter
    def cookiestring(self, value):
        """Cookie string setter"""
        c = Cookie.SimpleCookie(value)
        sc = [(i.key, i.value) for i in c.values()]
        self.cookies = dict(sc)

    def snapshot(self):
        """Return a copy of the session's current headers and cookies."""
        session = {
            'headers': self.headers.copy(),
            'cookies': self.cookies.copy()
        }
        return session

    def request(self, *args, **kwargs):
        """Issue a request."""
        # Per-call headers take precedence over the session defaults.
        headers = self.headers.copy()
        if self.cookiestring:
            headers['Cookie'] = self.cookiestring
        headers.update(kwargs.get('headers', {}))
        kwargs['headers'] = headers

        r = request(*args, **kwargs)

        # Absorb cookies set by the server so later requests carry them.
        self.cookies.update(r.cookies)

        return r

    def fetch(self, *args, **kwargs):
        """Fetch an URL"""
        data = kwargs.get('data', None)
        files = kwargs.get('files', {})

        # POST when there is a body or an upload, otherwise GET.
        if data and isinstance(data, (basestring, dict)) or files:
            return self.post(*args, **kwargs)
        return self.get(*args, **kwargs)

    def get(self, *args, **kwargs):
        """Issue a get request."""
        kwargs['method'] = 'GET'
        return self.request(*args, **kwargs)

    def post(self, *args, **kwargs):
        """Issue a post request."""
        kwargs['method'] = 'POST'
        return self.request(*args, **kwargs)

    def put(self, *args, **kwargs):
        """Issue a put request."""
        kwargs['method'] = 'PUT'
        return self.request(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """Issue a delete request."""
        kwargs['method'] = 'DELETE'
        return self.request(*args, **kwargs)

    def head(self, *args, **kwargs):
        """Issue a head request."""
        kwargs['method'] = 'HEAD'
        return self.request(*args, **kwargs)

    def options(self, *args, **kwargs):
        """Issue a options request."""
        kwargs['method'] = 'OPTIONS'
        return self.request(*args, **kwargs)

    def trace(self, *args, **kwargs):
        """Issue a trace request."""
        kwargs['method'] = 'TRACE'
        return self.request(*args, **kwargs)

    def patch(self, *args, **kwargs):
        """Issue a patch request."""
        kwargs['method'] = 'PATCH'
        return self.request(*args, **kwargs)
def fetch(*args, **kwargs):
    """fetch an URL.

    :func:`~urlfetch.fetch` is a wrapper of :func:`~urlfetch.request`.
    It calls :func:`~urlfetch.get` by default. If one of parameter ``data``
    or parameter ``files`` is supplied, :func:`~urlfetch.post` is called.
    """
    body = kwargs.get('data', None)
    uploads = kwargs.get('files', {})
    # POST when there is a body or an upload, otherwise GET.
    use_post = uploads or (body and isinstance(body, (basestring, dict)))
    handler = post if use_post else get
    return handler(*args, **kwargs)
def match_no_proxy(host, no_proxy):
    """Decide whether *host* matches one entry of the no-proxy list.

    If both *host* and *no_proxy* look like dotted IPv4 addresses they are
    compared bitwise; *no_proxy* may carry a ``/bits`` CIDR suffix, in which
    case only the leading ``bits`` bits are compared.  Otherwise a plain
    domain-suffix match is performed.

    :arg string host: Host name or IPv4 address of the request.
    :arg string no_proxy: One entry of the no-proxy list.
    :returns: ``True`` if *host* should bypass the proxy.
    """
    # BUGFIX: dots must be escaped — an unescaped ``.`` matches any
    # character, so strings like "1921681105" were mistaken for IPs.
    ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})"
    no_proxy_ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})(?=/(\d+))?"
    ip_match = re.match(ip_regex, host)
    no_proxy_ip_match = re.match(no_proxy_ip_regex, no_proxy)
    if no_proxy_ip_match and ip_match:
        host_bits = "".join("{:08b}".format(int(section))
                            for section in ip_match.group(1, 2, 3, 4))
        no_proxy_bits = "".join("{:08b}".format(int(section))
                                for section in no_proxy_ip_match.group(1, 2, 3, 4))
        if no_proxy_ip_match.group(5) is not None:
            # CIDR form: compare only the network prefix.
            bit_match_count = int(no_proxy_ip_match.group(5))
            return host_bits[:bit_match_count] == no_proxy_bits[:bit_match_count]
        else:
            return host_bits == no_proxy_bits
    else:
        # Non-IP entries match by domain suffix, e.g. "example.com".
        return host.endswith(no_proxy)
def request(url, method="GET", params=None, data=None, headers={},
            timeout=None, files={}, randua=False, auth=None, length_limit=None,
            proxies=None, trust_env=True, max_redirects=0,
            source_address=None, **kwargs):
    """request an URL

    :arg string url: URL to be fetched.
    :arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
                        ``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,
                        ``PATCH``. ``GET`` is the default.
    :arg dict/string params: (optional) Dict or string to attach to url as
                             querystring.
    :arg dict headers: (optional) HTTP request headers.
    :arg float timeout: (optional) Timeout in seconds
    :arg files: (optional) Files to be sended
    :arg randua: (optional) If ``True`` or ``path string``, use a random
                 user-agent in headers, instead of
                 ``'urlfetch/' + __version__``
    :arg tuple auth: (optional) (username, password) for basic authentication
    :arg int length_limit: (optional) If ``None``, no limits on content length,
                           if the limit reached raised exception 'Content length
                           is more than ...'
    :arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',
                       'https': '127.0.0.1:563'}
    :arg bool trust_env: (optional) If ``True``, urlfetch will get infomations
                         from env, such as HTTP_PROXY, HTTPS_PROXY
    :arg int max_redirects: (integer, optional) Max redirects allowed within a
                            request. Default is 0, which means redirects are
                            not allowed.
    :arg tuple source_address: (optional) A tuple of (host, port) to
                               specify the source_address to bind to. This
                               argument is ignored if you're using Python prior
                               to 2.7/3.2.
    :returns: A :class:`~urlfetch.Response` object
    :raises: :class:`URLError`, :class:`UrlfetchException`,
             :class:`TooManyRedirects`,
    """
    def make_connection(conn_type, host, port, timeout, source_address):
        """Return HTTP or HTTPS connection."""
        if support_source_address:
            kwargs = {'timeout': timeout, 'source_address': source_address}
        else:
            kwargs = {'timeout': timeout}
            if source_address is not None:
                # BUGFIX: the two string literals were concatenated without a
                # space ("requiresPython").
                raise UrlfetchException('source_address requires '
                                       'Python 2.7/3.2 or newer versions')
        if conn_type == 'http':
            conn = HTTPConnection(host, port, **kwargs)
        elif conn_type == 'https':
            conn = HTTPSConnection(host, port, **kwargs)
        else:
            raise URLError('Unknown Connection Type: %s' % conn_type)
        return conn

    via_proxy = False
    method = method.upper()
    if method not in ALLOWED_METHODS:
        raise UrlfetchException("Method should be one of " +
                                ", ".join(ALLOWED_METHODS))

    # Attach params to the url as a querystring.
    if params:
        if isinstance(params, dict):
            url = url_concat(url, params)
        elif isinstance(params, basestring):
            if url[-1] not in ('?', '&'):
                url += '&' if ('?' in url) else '?'
            url += params

    parsed_url = parse_url(url)

    reqheaders = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate, compress, identity, *',
        'User-Agent': random_useragent(randua),
        'Host': parsed_url['http_host']
    }

    # Proxy support
    scheme = parsed_url['scheme']
    if proxies is None:
        # BUGFIX: previously proxies stayed None when trust_env was False,
        # crashing on proxies.get() below.
        proxies = PROXIES if trust_env else {}
    ignore_hosts = PROXY_IGNORE_HOSTS
    if trust_env:
        no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')
        if no_proxy:
            ignore_hosts = no_proxy.split(",")
    proxy = proxies.get(scheme)
    if proxy and not any(match_no_proxy(parsed_url['host'], host)
                         for host in ignore_hosts):
        via_proxy = True
        if '://' not in proxy:
            proxy = '%s://%s' % (scheme, proxy)
        parsed_proxy = parse_url(proxy)
        # Proxy-Authorization
        if parsed_proxy['username'] and parsed_proxy['password']:
            proxyauth = '%s:%s' % (parsed_proxy['username'],
                                   parsed_proxy['password'])
            proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
            reqheaders['Proxy-Authorization'] = 'Basic ' + \
                proxyauth.decode('utf-8')
        conn = make_connection(scheme, parsed_proxy['host'],
                               parsed_proxy['port'], timeout, source_address)
    else:
        conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],
                               timeout, source_address)

    # Basic auth: explicit argument wins over credentials embedded in the URL.
    if not auth and parsed_url['username'] and parsed_url['password']:
        auth = (parsed_url['username'], parsed_url['password'])
    if auth:
        if isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
        auth = base64.b64encode(auth.encode('utf-8'))
        reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')

    if files:
        content_type, data = encode_multipart(data, files)
        reqheaders['Content-Type'] = content_type
    elif isinstance(data, dict):
        data = urlencode(data, 1)

    if isinstance(data, basestring) and not files:
        # httplib will set 'Content-Length', also you can set it by yourself
        reqheaders["Content-Type"] = "application/x-www-form-urlencoded"
        # what if the method is GET, HEAD or DELETE
        # just do not make so much decisions for users

    # Caller-supplied headers override everything computed above.
    reqheaders.update(headers)

    start_time = time.time()
    try:
        request_url = url if via_proxy else parsed_url['uri']
        conn.request(method, request_url, data, reqheaders)
        resp = conn.getresponse()
    except socket.timeout as e:
        raise Timeout(e)
    except Exception as e:
        raise UrlfetchException(e)
    end_time = time.time()
    total_time = end_time - start_time
    history = []
    response = Response.from_httplib(resp, reqheaders=reqheaders,
                                     length_limit=length_limit,
                                     history=history[:], url=url,
                                     total_time=total_time,
                                     start_time=start_time)

    while (response.status in (301, 302, 303, 307) and
            'location' in response.headers and max_redirects):
        # Drain and close the previous response before following the redirect.
        response.body
        response.close()
        history.append(response)

        if len(history) > max_redirects:
            raise TooManyRedirects('max_redirects exceeded')

        # Per RFC, only 307 preserves the original method.
        method = method if response.status == 307 else 'GET'
        location = response.headers['location']
        if location[:2] == '//':
            # scheme-relative redirect target
            url = parsed_url['scheme'] + ':' + location
        else:
            url = urlparse.urljoin(url, location)
        parsed_url = parse_url(url)
        reqheaders['Host'] = parsed_url['http_host']
        reqheaders['Referer'] = response.url

        # Proxy
        scheme = parsed_url['scheme']
        proxy = proxies.get(scheme)
        if proxy and parsed_url['host'] not in PROXY_IGNORE_HOSTS:
            via_proxy = True
            if '://' not in proxy:
                proxy = '%s://%s' % (parsed_url['scheme'], proxy)
            parsed_proxy = parse_url(proxy)
            # Proxy-Authorization
            if parsed_proxy['username'] and parsed_proxy['password']:
                # BUGFIX: the password slot previously reused
                # parsed_proxy['username'], producing "user:user" credentials.
                proxyauth = '%s:%s' % (parsed_proxy['username'],
                                       parsed_proxy['password'])
                proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
                reqheaders['Proxy-Authorization'] = 'Basic ' + \
                    proxyauth.decode('utf-8')
            conn = make_connection(scheme, parsed_proxy['host'],
                                   parsed_proxy['port'], timeout,
                                   source_address)
        else:
            via_proxy = False
            reqheaders.pop('Proxy-Authorization', None)
            conn = make_connection(scheme, parsed_url['host'],
                                   parsed_url['port'], timeout, source_address)

        try:
            request_url = url if via_proxy else parsed_url['uri']
            conn.request(method, request_url, data, reqheaders)
            resp = conn.getresponse()
        except socket.timeout as e:
            raise Timeout(e)
        except Exception as e:
            raise UrlfetchException(e)
        # NOTE(review): total_time still reflects the first hop only.
        response = Response.from_httplib(resp, reqheaders=reqheaders,
                                         length_limit=length_limit,
                                         history=history[:], url=url,
                                         total_time=total_time,
                                         start_time=start_time)

    return response
##############################################################################
# Shortcuts and Helpers #######################################################
##############################################################################
def _partial_method(method):
    """Build a module-level shortcut that issues *method* requests."""
    func = partial(request, method=method)
    func.__doc__ = 'Issue a %s request' % method.lower()
    func.__name__ = method.lower()
    func.__module__ = request.__module__
    return func

# One shortcut per supported HTTP verb; each shares request()'s signature.
get = _partial_method("GET")
post = _partial_method("POST")
put = _partial_method("PUT")
delete = _partial_method("DELETE")
head = _partial_method("HEAD")
options = _partial_method("OPTIONS")
trace = _partial_method("TRACE")
patch = _partial_method("PATCH")
# The factory is module-private scaffolding; drop it once the verbs exist.
del _partial_method
class ObjectDict(dict):
    """A dict whose items are also readable and writable as attributes."""

    def __getattr__(self, name):
        # Only called for names not found the normal way; map them to keys.
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        # Every attribute assignment becomes an item assignment.
        self[name] = value
def parse_url(url):
    """Return a dictionary of parsed url

    Including scheme, netloc, path, params, query, fragment, uri, username,
    password, host, port and http_host
    """
    try:
        url = unicode(url)
    except UnicodeDecodeError:
        pass

    if py3k:
        make_utf8 = lambda x: x
    else:
        # py2: convert unicode pieces back to utf-8 byte strings
        make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x

    # Remember the real scheme, then normalize to http:// so that
    # urlsplit always sees a netloc.
    if '://' in url:
        scheme, url = url.split('://', 1)
    else:
        scheme = 'http'
    url = 'http://' + url

    parsed = urlparse.urlsplit(url)
    r = ObjectDict()
    r['scheme'] = make_utf8(scheme)
    r['netloc'] = make_utf8(parsed.netloc)
    r['path'] = make_utf8(parsed.path)
    r['query'] = make_utf8(parsed.query)
    r['fragment'] = make_utf8(parsed.fragment)
    r['uri'] = make_utf8(parsed.path)
    if parsed.query:
        r['uri'] += '?' + make_utf8(parsed.query)
    r['username'] = make_utf8(parsed.username)
    r['password'] = make_utf8(parsed.password)
    # IDNA-encode so non-ASCII host names can be sent on the wire.
    # NOTE(review): raises AttributeError when the URL has no hostname.
    host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
    r['host'] = r['hostname'] = host
    try:
        r['port'] = parsed.port
    except ValueError:
        r['port'] = None
    if r['port']:
        # host:port form used for the Host request header
        r['http_host'] = '%s:%d' % (r['host'], r['port'])
    else:
        r['http_host'] = r['host']
    return r
def get_proxies_from_environ():
    """Build a scheme -> proxy-url mapping from environment variables."""
    proxies = {}
    for scheme in ('http', 'https'):
        # Lower-case variables take precedence, matching the original lookup.
        value = os.getenv(scheme + '_proxy') or os.getenv(scheme.upper() + '_PROXY')
        if value:
            proxies[scheme] = value
    return proxies
def mb_code(s, coding=None, errors='replace'):
    """encoding/decoding helper.

    Decode *s* by trying several common encodings; if *coding* is given,
    the decoded text is re-encoded to that charset.

    :arg s: unicode or byte string.
    :arg string coding: (optional) Target encoding.
    :arg string errors: (optional) Error handling scheme, default 'replace'.
    """
    if isinstance(s, unicode):
        return s if coding is None else s.encode(coding, errors=errors)
    for c in ('utf-8', 'gb2312', 'gbk', 'gb18030', 'big5'):
        try:
            s = s.decode(c)
            return s if coding is None else s.encode(coding, errors=errors)
        # BUGFIX: was a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit; only codec failures should be retried.
        except (UnicodeError, LookupError):
            pass
    # Last resort: decode with the default codec, replacing bad bytes.
    return unicode(s, errors=errors)
def url_concat(url, args, keep_existing=True):
    """Concatenate url and argument dictionary

    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'

    :arg string url: URL being concat to.
    :arg dict args: Args being concat.
    :arg bool keep_existing: (Optional) Whether to keep the args which are
                             already in url, default is ``True``.
    """
    if not args:
        return url
    if keep_existing:
        # Append with the correct separator, preserving existing args.
        if url[-1] in ('?', '&'):
            sep = ''
        elif '?' in url:
            sep = '&'
        else:
            sep = '?'
        return url + sep + urlencode(args, 1)
    # Re-build the querystring so new args replace existing ones.
    base, _, query = url.partition('?')
    merged = urlparse.parse_qs(query, True)
    merged.update(args)
    return base + '?' + urlencode(merged, 1)
def choose_boundary():
    """Generate a multipart boundary.

    :returns: A boundary string
    """
    global BOUNDARY_PREFIX
    if BOUNDARY_PREFIX is None:
        # Build the stable part of the boundary once per process; uid/pid
        # are skipped on platforms that do not provide them.
        prefix = "urlfetch"
        for attr in ("getuid", "getpid"):
            getter = getattr(os, attr, None)
            if getter is not None:
                prefix += "." + repr(getter())
        BOUNDARY_PREFIX = prefix
    # A fresh uuid per call keeps boundaries unique across requests.
    return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
def encode_multipart(data, files):
    """Encode multipart.

    :arg dict data: Data to be encoded
    :arg dict files: Files to be encoded
    :returns: Tuple of (content_type, encoded binary string)
    :raises: :class:`UrlfetchException`
    """
    body = BytesIO()
    boundary = choose_boundary()
    part_boundary = b('--%s\r\n' % boundary)
    # Incremental utf-8 encoder used to write text parts into the bytes buffer.
    writer = codecs.lookup('utf-8')[3]

    if isinstance(data, dict):
        for name, values in data.items():
            if not isinstance(values, (list, tuple, set)):
                # behave like urllib.urlencode(dict, 1)
                values = (values, )
            for value in values:
                body.write(part_boundary)
                writer(body).write('Content-Disposition: form-data; '
                                   'name="%s"\r\n' % name)
                body.write(b'Content-Type: text/plain\r\n\r\n')
                if isinstance(value, int):
                    value = str(value)
                if py3k and isinstance(value, str):
                    writer(body).write(value)
                else:
                    body.write(value)
                body.write(b'\r\n')

    for fieldname, f in files.items():
        if isinstance(f, tuple):
            filename, f = f
        elif hasattr(f, 'name'):
            filename = basename(f.name)
        else:
            filename = None
            raise UrlfetchException("file must has filename")

        if hasattr(f, 'read'):
            value = f.read()
        elif isinstance(f, basestring):
            value = f
        else:
            value = str(f)

        body.write(part_boundary)
        if filename:
            writer(body).write('Content-Disposition: form-data; name="%s"; '
                               'filename="%s"\r\n' % (fieldname, filename))
            body.write(b'Content-Type: application/octet-stream\r\n\r\n')
        else:
            # BUGFIX: this branch previously interpolated ``name`` — a stale
            # loop variable from the data loop above (NameError when data is
            # empty); the current field's name is ``fieldname``.
            writer(body).write('Content-Disposition: form-data; name="%s"'
                               '\r\n' % fieldname)
            body.write(b'Content-Type: text/plain\r\n\r\n')
        if py3k and isinstance(value, str):
            writer(body).write(value)
        else:
            body.write(value)
        body.write(b'\r\n')

    body.write(b('--' + boundary + '--\r\n'))
    content_type = 'multipart/form-data; boundary=%s' % boundary
    return content_type, body.getvalue()
##############################################################################
# Constants and Globals #######################################################
##############################################################################
# HTTP methods urlfetch understands.
ALLOWED_METHODS = ("GET", "DELETE", "HEAD", "OPTIONS", "PUT", "POST", "TRACE",
                   "PATCH")

# Hosts that never go through a proxy.
PROXY_IGNORE_HOSTS = ('127.0.0.1', 'localhost')

# Default proxies, read from the environment once at import time.
PROXIES = get_proxies_from_environ()

# Lazily initialised by choose_boundary().
BOUNDARY_PREFIX = None

# Optional user-agent list for the ``randua`` feature: the first existing
# candidate path wins, otherwise None.
UAFILENAME = 'urlfetch.useragents.list'
UAFILE = next((i for i in set((pathjoin(sys.prefix, UAFILENAME),
                               pathjoin(sys.prefix, 'local', UAFILENAME),
                               pathjoin(dirname(abspath(__file__)), UAFILENAME)))
               if os.path.isfile(i)), None)
|
ifduyue/urlfetch
|
urlfetch.py
|
url_concat
|
python
|
def url_concat(url, args, keep_existing=True):
if not args:
return url
if keep_existing:
if url[-1] not in ('?', '&'):
url += '&' if ('?' in url) else '?'
return url + urlencode(args, 1)
else:
url, seq, query = url.partition('?')
query = urlparse.parse_qs(query, True)
query.update(args)
return url + '?' + urlencode(query, 1)
|
Concatenate url and argument dictionary
>>> url_concat("http://example.com/foo?a=b", dict(c="d"))
'http://example.com/foo?a=b&c=d'
:arg string url: URL being concat to.
:arg dict args: Args being concat.
:arg bool keep_existing: (Optional) Whether to keep the args which are
already in url, default is ``True``.
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L932-L954
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
urlfetch
~~~~~~~~~~
An easy to use HTTP client based on httplib.
:copyright: (c) 2011-2019 by Yue Du.
:license: BSD 2-clause License, see LICENSE for more details.
"""
__version__ = '1.1.2'
__author__ = 'Yue Du <ifduyue@gmail.com>'
__url__ = 'https://github.com/ifduyue/urlfetch'
__license__ = 'BSD 2-Clause License'
import os, sys, base64, codecs, uuid, stat, time, socket
from os.path import basename, dirname, abspath, join as pathjoin
from functools import partial
from io import BytesIO
import re
try:
import simplejson as json
except ImportError:
import json
py3k = sys.version_info >= (3, 0)
support_source_address = (sys.version_info >= (2, 7) and not py3k
or sys.version_info >= (3, 2))
if py3k:
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urlencode
import urllib.parse as urlparse
import http.cookies as Cookie
basestring = (str, bytes)
unicode = str
b = lambda s: s.encode('latin-1')
u = lambda s: s
else:
from httplib import HTTPConnection, HTTPSConnection
from urllib import urlencode
import urlparse
import Cookie
basestring = basestring
unicode = unicode
b = lambda s: s
u = lambda s: unicode(s, 'unicode_escape')
__all__ = ('request', 'fetch', 'Session',
'get', 'head', 'put', 'post', 'delete',
'options', 'trace', 'patch',
'UrlfetchException', 'ContentLimitExceeded', 'URLError',
'ContentDecodingError', 'TooManyRedirects')
# Exception hierarchy: everything urlfetch raises derives from
# UrlfetchException, so callers can catch the whole family at once.
class UrlfetchException(IOError):
    "Base exception. All exceptions and errors will subclass from this."


class ContentLimitExceeded(UrlfetchException):
    "Content length is beyond the limit."


class URLError(UrlfetchException, ValueError):
    "Error parsing or handling the URL."


class ContentDecodingError(UrlfetchException):
    "Failed to decode the content."


class TooManyRedirects(UrlfetchException):
    """Too many redirects."""


class Timeout(UrlfetchException):
    """Request timed out."""
class cached_property(object):
    """Cached property.

    A property that is only computed once per instance and then replaces
    itself with an ordinary attribute. Deleting the attribute resets the
    property.
    """

    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self.__get = fget
        self.__set = fset
        self.__del = fdel
        # Mirror the wrapped getter so the descriptor stays introspectable.
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__

    def __get__(self, instance, owner):
        if instance is None:
            # attribute is accessed through the owner class
            return self
        try:
            # Serve the cached value from the instance dict if present.
            return instance.__dict__[self.__name__]
        except KeyError:
            # First access: compute once and cache on the instance.
            value = instance.__dict__[self.__name__] = self.__get(instance)
            return value

    def __set__(self, instance, value):
        if instance is None:
            return self
        # An optional setter may transform the value before caching it.
        if self.__set is not None:
            value = self.__set(instance, value)
        instance.__dict__[self.__name__] = value

    def __delete__(self, instance):
        if instance is None:
            return self
        try:
            value = instance.__dict__.pop(self.__name__)
        except KeyError:
            # nothing cached yet — deletion is a no-op
            pass
        else:
            if self.__del is not None:
                self.__del(instance, value)

    def setter(self, fset):
        """Return a copy of this descriptor with *fset* as the setter."""
        return self.__class__(self.__get, fset, self.__del)

    def deleter(self, fdel):
        """Return a copy of this descriptor with *fdel* as the deleter."""
        return self.__class__(self.__get, self.__set, fdel)
##############################################################################
# Core Methods and Classes ####################################################
##############################################################################
class Response(object):
    """A Response object.

    >>> import urlfetch
    >>> response = urlfetch.get("http://docs.python.org/")
    >>> response.total_time
    0.033042049407959
    >>> response.status, response.reason, response.version
    (200, 'OK', 10)
    >>> type(response.body), len(response.body)
    (<type 'str'>, 8719)
    >>> type(response.text), len(response.text)
    (<type 'unicode'>, 8719)
    >>> response.getheader('server')
    'Apache/2.2.16 (Debian)'
    >>> response.getheaders()
    [
        ('content-length', '8719'),
        ('x-cache', 'MISS from localhost'),
        ('accept-ranges', 'bytes'),
        ('vary', 'Accept-Encoding'),
        ('server', 'Apache/2.2.16 (Debian)'),
        ('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
        ('connection', 'close'),
        ('etag', '"13cc5e4-220f-4c36507ded580"'),
        ('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
        ('content-type', 'text/html'),
        ('x-cache-lookup', 'MISS from localhost:8080')
    ]
    >>> response.headers
    {
        'content-length': '8719',
        'x-cache': 'MISS from localhost',
        'accept-ranges': 'bytes',
        'vary': 'Accept-Encoding',
        'server': 'Apache/2.2.16 (Debian)',
        'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
        'connection': 'close',
        'etag': '"13cc5e4-220f-4c36507ded580"',
        'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
        'content-type': 'text/html',
        'x-cache-lookup': 'MISS from localhost:8080'
    }

    :raises: :class:`ContentLimitExceeded`
    """

    def __init__(self, r, **kwargs):
        # Attach caller-supplied metadata first (url, history, start_time...).
        for k in kwargs:
            setattr(self, k, kwargs[k])
        self._r = r  # httplib.HTTPResponse
        self.msg = r.msg

        #: Status code returned by server.
        self.status = r.status

        # compatible with requests
        #: An alias of :attr:`status`.
        self.status_code = r.status

        #: Reason phrase returned by server.
        self.reason = r.reason

        #: HTTP protocol version used by server.
        #: 10 for HTTP/1.0, 11 for HTTP/1.1.
        self.version = r.version

        #: total time
        self.total_time = kwargs.pop('total_time', None)

        self.getheader = r.getheader
        self.getheaders = r.getheaders

        self._content_encoding = self.getheader('content-encoding', None)
        self._decoder = None

        # A missing or None length_limit deliberately falls through to None.
        try:
            self.length_limit = int(kwargs.get('length_limit'))
        except:
            self.length_limit = None

        # if content (length) size is more than length_limit, skip
        content_length = int(self.getheader('Content-Length', 0))
        if self.length_limit and content_length > self.length_limit:
            self.close()
            raise ContentLimitExceeded("Content length is more than %d bytes"
                                       % self.length_limit)

    def read(self, chunk_size=65536):
        """Read content (for streaming and large files)

        :arg int chunk_size: size of chunk, default is 65536.
        """
        return self._r.read(chunk_size)

    def __iter__(self):
        return self

    def __next__(self):
        chunk = self.read()
        if not chunk:
            # Stream exhausted: flush the decompressor once, then stop.
            if self._decoder:
                chunk = self._decoder.flush()
                self._decoder = None
                return chunk
            else:
                raise StopIteration
        else:
            ce = self._content_encoding
            if ce in ('gzip', 'deflate'):
                if not self._decoder:
                    import zlib
                    if ce == 'gzip':
                        # 16 + MAX_WBITS makes zlib expect a gzip header.
                        self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
                    else:
                        self._decoder = zlib.decompressobj()
                # NOTE(review): ``zlib`` is imported in the branch above; on a
                # later chunk with an existing decoder the except clause below
                # references a name this call never imported — confirm.
                try:
                    return self._decoder.decompress(chunk)
                except zlib.error:
                    # Retry as a raw deflate stream (some servers omit the
                    # zlib header).
                    self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
                    try:
                        return self._decoder.decompress(chunk)
                    except (IOError, zlib.error) as e:
                        self.close()
                        raise ContentDecodingError(e)
            if ce:
                self.close()
                raise ContentDecodingError('Unknown encoding: %s' % ce)
            return chunk

    next = __next__  # py2 iterator protocol

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        return False

    @classmethod
    def from_httplib(cls, connection, **kwargs):
        """Make an :class:`~urlfetch.Response` object from a httplib response
        object."""
        return cls(connection, **kwargs)

    @cached_property
    def body(self):
        """Response body.

        :raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
        """
        content = []
        length = 0
        for chunk in self:
            content.append(chunk)
            length += len(chunk)
            # Enforce the limit on the *decoded* length while streaming.
            if self.length_limit and length > self.length_limit:
                self.close()
                raise ContentLimitExceeded("Content length is more than %d "
                                           "bytes" % self.length_limit)
        return b("").join(content)

    # compatible with requests
    #: An alias of :attr:`body`.
    @property
    def content(self):
        return self.body

    @cached_property
    def text(self):
        """Response body in unicode."""
        return mb_code(self.content)

    @cached_property
    def json(self):
        """Load response body as json.

        :raises: :class:`ContentDecodingError`
        """
        try:
            return json.loads(self.text)
        except Exception as e:
            raise ContentDecodingError(e)

    @cached_property
    def headers(self):
        """Response headers.

        Response headers is a dict with all keys in lower case.

        >>> import urlfetch
        >>> response = urlfetch.get("http://docs.python.org/")
        >>> response.headers
        {
            'content-length': '8719',
            'x-cache': 'MISS from localhost',
            'accept-ranges': 'bytes',
            'vary': 'Accept-Encoding',
            'server': 'Apache/2.2.16 (Debian)',
            'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
            'connection': 'close',
            'etag': '"13cc5e4-220f-4c36507ded580"',
            'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
            'content-type': 'text/html',
            'x-cache-lookup': 'MISS from localhost:8080'
        }
        """
        if py3k:
            # normalize header names to lower case on py3
            return dict((k.lower(), v) for k, v in self.getheaders())
        else:
            return dict(self.getheaders())

    @cached_property
    def cookies(self):
        """Cookies in dict"""
        c = Cookie.SimpleCookie(self.getheader('set-cookie'))
        return dict((i.key, i.value) for i in c.values())

    @cached_property
    def cookiestring(self):
        """Cookie string"""
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())

    @cached_property
    def links(self):
        """Links parsed from HTTP Link header"""
        ret = []
        linkheader = self.getheader('link')
        if not linkheader:
            return ret
        for i in linkheader.split(','):
            try:
                url, params = i.split(';', 1)
            except ValueError:
                url, params = i, ''
            link = {}
            link['url'] = url.strip('''<> '"''')
            for param in params.split(';'):
                try:
                    k, v = param.split('=')
                except ValueError:
                    break
                link[k.strip(''' '"''')] = v.strip(''' '"''')
            ret.append(link)
        return ret

    def close(self):
        """Close the connection."""
        self._r.close()

    def __del__(self):
        # NOTE(review): may raise AttributeError if __init__ failed before
        # self._r was assigned — consider guarding with try/except.
        self.close()
class Session(object):
    """A session object.

    :class:`urlfetch.Session` can hold common headers and cookies.
    Every request issued by a :class:`urlfetch.Session` object will bring up
    these headers and cookies.

    :class:`urlfetch.Session` plays a role in handling cookies, just like a
    cookiejar.

    :arg dict headers: Init headers.
    :arg dict cookies: Init cookies.
    :arg tuple auth: (username, password) for basic authentication.
    """

    def __init__(self, headers={}, cookies={}, auth=None):
        """Init a :class:`~urlfetch.Session` object"""
        #: headers sent with every request of this session
        self.headers = headers.copy()
        #: cookies sent with every request of this session
        self.cookies = cookies.copy()

        # Pre-compute the Basic auth header once for the whole session.
        if auth and isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
            auth = base64.b64encode(auth.encode('utf-8'))
            self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')

    def putheader(self, header, value):
        """Add a header to default headers."""
        self.headers[header] = value

    def popheader(self, header):
        """Remove a header from default headers."""
        return self.headers.pop(header)

    def putcookie(self, key, value=""):
        """Add a cookie to default cookies."""
        self.cookies[key] = value

    def popcookie(self, key):
        """Remove a cookie from default cookies."""
        return self.cookies.pop(key)

    @property
    def cookiestring(self):
        """Cookie string.

        It's assignable, and assigning to it will change
        :attr:`~.Session.cookies` correspondingly.

        >>> s = Session()
        >>> s.cookiestring = 'foo=bar; 1=2'
        >>> s.cookies
        {'1': '2', 'foo': 'bar'}
        """
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())

    @cookiestring.setter
    def cookiestring(self, value):
        """Cookie string setter"""
        c = Cookie.SimpleCookie(value)
        sc = [(i.key, i.value) for i in c.values()]
        self.cookies = dict(sc)

    def snapshot(self):
        """Return a copy of the session's current headers and cookies."""
        session = {
            'headers': self.headers.copy(),
            'cookies': self.cookies.copy()
        }
        return session

    def request(self, *args, **kwargs):
        """Issue a request."""
        # Per-call headers take precedence over the session defaults.
        headers = self.headers.copy()
        if self.cookiestring:
            headers['Cookie'] = self.cookiestring
        headers.update(kwargs.get('headers', {}))
        kwargs['headers'] = headers

        r = request(*args, **kwargs)

        # Absorb cookies set by the server so later requests carry them.
        self.cookies.update(r.cookies)

        return r

    def fetch(self, *args, **kwargs):
        """Fetch an URL"""
        data = kwargs.get('data', None)
        files = kwargs.get('files', {})

        # POST when there is a body or an upload, otherwise GET.
        if data and isinstance(data, (basestring, dict)) or files:
            return self.post(*args, **kwargs)
        return self.get(*args, **kwargs)

    def get(self, *args, **kwargs):
        """Issue a get request."""
        kwargs['method'] = 'GET'
        return self.request(*args, **kwargs)

    def post(self, *args, **kwargs):
        """Issue a post request."""
        kwargs['method'] = 'POST'
        return self.request(*args, **kwargs)

    def put(self, *args, **kwargs):
        """Issue a put request."""
        kwargs['method'] = 'PUT'
        return self.request(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """Issue a delete request."""
        kwargs['method'] = 'DELETE'
        return self.request(*args, **kwargs)

    def head(self, *args, **kwargs):
        """Issue a head request."""
        kwargs['method'] = 'HEAD'
        return self.request(*args, **kwargs)

    def options(self, *args, **kwargs):
        """Issue a options request."""
        kwargs['method'] = 'OPTIONS'
        return self.request(*args, **kwargs)

    def trace(self, *args, **kwargs):
        """Issue a trace request."""
        kwargs['method'] = 'TRACE'
        return self.request(*args, **kwargs)

    def patch(self, *args, **kwargs):
        """Issue a patch request."""
        kwargs['method'] = 'PATCH'
        return self.request(*args, **kwargs)
def fetch(*args, **kwargs):
    """fetch an URL.

    :func:`~urlfetch.fetch` is a wrapper of :func:`~urlfetch.request`.
    It calls :func:`~urlfetch.get` by default. If one of parameter ``data``
    or parameter ``files`` is supplied, :func:`~urlfetch.post` is called.
    """
    body = kwargs.get('data', None)
    uploads = kwargs.get('files', {})
    # POST when there is a body or an upload, otherwise GET.
    use_post = uploads or (body and isinstance(body, (basestring, dict)))
    handler = post if use_post else get
    return handler(*args, **kwargs)
def match_no_proxy(host, no_proxy):
    """Decide whether *host* matches one entry of the no-proxy list.

    If both *host* and *no_proxy* look like dotted IPv4 addresses they are
    compared bitwise; *no_proxy* may carry a ``/bits`` CIDR suffix, in which
    case only the leading ``bits`` bits are compared.  Otherwise a plain
    domain-suffix match is performed.

    :arg string host: Host name or IPv4 address of the request.
    :arg string no_proxy: One entry of the no-proxy list.
    :returns: ``True`` if *host* should bypass the proxy.
    """
    # BUGFIX: dots must be escaped — an unescaped ``.`` matches any
    # character, so strings like "1921681105" were mistaken for IPs.
    ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})"
    no_proxy_ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})(?=/(\d+))?"
    ip_match = re.match(ip_regex, host)
    no_proxy_ip_match = re.match(no_proxy_ip_regex, no_proxy)
    if no_proxy_ip_match and ip_match:
        host_bits = "".join("{:08b}".format(int(section))
                            for section in ip_match.group(1, 2, 3, 4))
        no_proxy_bits = "".join("{:08b}".format(int(section))
                                for section in no_proxy_ip_match.group(1, 2, 3, 4))
        if no_proxy_ip_match.group(5) is not None:
            # CIDR form: compare only the network prefix.
            bit_match_count = int(no_proxy_ip_match.group(5))
            return host_bits[:bit_match_count] == no_proxy_bits[:bit_match_count]
        else:
            return host_bits == no_proxy_bits
    else:
        # Non-IP entries match by domain suffix, e.g. "example.com".
        return host.endswith(no_proxy)
def request(url, method="GET", params=None, data=None, headers={},
            timeout=None, files={}, randua=False, auth=None, length_limit=None,
            proxies=None, trust_env=True, max_redirects=0,
            source_address=None, **kwargs):
    """request an URL
    :arg string url: URL to be fetched.
    :arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
                        ``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,
                        ``PATCH``. ``GET`` is the default.
    :arg dict/string params: (optional) Dict or string to attach to url as
                             querystring.
    :arg dict headers: (optional) HTTP request headers.
    :arg float timeout: (optional) Timeout in seconds
    :arg files: (optional) Files to be sended
    :arg randua: (optional) If ``True`` or ``path string``, use a random
                 user-agent in headers, instead of
                 ``'urlfetch/' + __version__``
    :arg tuple auth: (optional) (username, password) for basic authentication
    :arg int length_limit: (optional) If ``None``, no limits on content length,
                           if the limit reached raised exception 'Content
                           length is more than ...'
    :arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',
                       'https': '127.0.0.1:563'}
    :arg bool trust_env: (optional) If ``True``, urlfetch will get infomations
                         from env, such as HTTP_PROXY, HTTPS_PROXY
    :arg int max_redirects: (integer, optional) Max redirects allowed within a
                            request. Default is 0, which means redirects are
                            not allowed.
    :arg tuple source_address: (optional) A tuple of (host, port) to
                               specify the source_address to bind to. This
                               argument is ignored if you're using Python
                               prior to 2.7/3.2.
    :returns: A :class:`~urlfetch.Response` object
    :raises: :class:`URLError`, :class:`UrlfetchException`,
             :class:`TooManyRedirects`,
    """
    def make_connection(conn_type, host, port, timeout, source_address):
        """Return HTTP or HTTPS connection."""
        if support_source_address:
            kwargs = {'timeout': timeout, 'source_address': source_address}
        else:
            kwargs = {'timeout': timeout}
            if source_address is not None:
                # BUGFIX: added the missing space between the two string
                # halves ('requiresPython' previously).
                raise UrlfetchException('source_address requires '
                                        'Python 2.7/3.2 or newer versions')
        if conn_type == 'http':
            conn = HTTPConnection(host, port, **kwargs)
        elif conn_type == 'https':
            conn = HTTPSConnection(host, port, **kwargs)
        else:
            raise URLError('Unknown Connection Type: %s' % conn_type)
        return conn

    via_proxy = False

    method = method.upper()
    if method not in ALLOWED_METHODS:
        raise UrlfetchException("Method should be one of " +
                                ", ".join(ALLOWED_METHODS))

    if params:
        # BUGFIX: dict params used to be passed to ``url_concat()``, which is
        # not defined anywhere in this module and raised NameError.  Encode
        # them and append through the same code path as string params.
        if isinstance(params, dict):
            params = urlencode(params, 1)
        if isinstance(params, basestring):
            if url[-1] not in ('?', '&'):
                url += '&' if ('?' in url) else '?'
            url += params

    parsed_url = parse_url(url)

    # Default request headers; caller-supplied ``headers`` override below.
    reqheaders = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate, compress, identity, *',
        'User-Agent': random_useragent(randua),
        'Host': parsed_url['http_host']
    }

    # Proxy support
    scheme = parsed_url['scheme']
    if proxies is None:
        # BUGFIX: with ``proxies=None`` and ``trust_env=False`` the original
        # left ``proxies`` as None and crashed on ``proxies.get()`` below.
        proxies = PROXIES if trust_env else {}
    ignore_hosts = PROXY_IGNORE_HOSTS
    if trust_env:
        no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')
        if no_proxy:
            ignore_hosts = no_proxy.split(",")
    proxy = proxies.get(scheme)
    if proxy and not any(match_no_proxy(parsed_url['host'], host)
                         for host in ignore_hosts):
        via_proxy = True
        if '://' not in proxy:
            proxy = '%s://%s' % (scheme, proxy)
        parsed_proxy = parse_url(proxy)
        # Proxy-Authorization
        if parsed_proxy['username'] and parsed_proxy['password']:
            proxyauth = '%s:%s' % (parsed_proxy['username'],
                                   parsed_proxy['password'])
            proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
            reqheaders['Proxy-Authorization'] = 'Basic ' + \
                proxyauth.decode('utf-8')
        conn = make_connection(scheme, parsed_proxy['host'],
                               parsed_proxy['port'], timeout, source_address)
    else:
        conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],
                               timeout, source_address)

    # Credentials embedded in the URL act as a fallback for ``auth``.
    if not auth and parsed_url['username'] and parsed_url['password']:
        auth = (parsed_url['username'], parsed_url['password'])
    if auth:
        if isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
        auth = base64.b64encode(auth.encode('utf-8'))
        reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')

    if files:
        content_type, data = encode_multipart(data, files)
        reqheaders['Content-Type'] = content_type
    elif isinstance(data, dict):
        data = urlencode(data, 1)

    if isinstance(data, basestring) and not files:
        # httplib will set 'Content-Length', also you can set it by yourself
        reqheaders["Content-Type"] = "application/x-www-form-urlencoded"
        # what if the method is GET, HEAD or DELETE
        # just do not make so much decisions for users

    reqheaders.update(headers)

    start_time = time.time()
    try:
        # Proxies get the absolute URL; direct connections only the path+query.
        request_url = url if via_proxy else parsed_url['uri']
        conn.request(method, request_url, data, reqheaders)
        resp = conn.getresponse()
    except socket.timeout as e:
        raise Timeout(e)
    except Exception as e:
        raise UrlfetchException(e)
    end_time = time.time()
    total_time = end_time - start_time
    history = []
    response = Response.from_httplib(resp, reqheaders=reqheaders,
                                     length_limit=length_limit,
                                     history=history[:], url=url,
                                     total_time=total_time,
                                     start_time=start_time)

    while (response.status in (301, 302, 303, 307) and
           'location' in response.headers and max_redirects):
        # Drain and close the previous response before following the redirect.
        response.body
        response.close()
        history.append(response)

        if len(history) > max_redirects:
            raise TooManyRedirects('max_redirects exceeded')

        # Only 307 preserves the original method across a redirect.
        method = method if response.status == 307 else 'GET'
        location = response.headers['location']
        if location[:2] == '//':
            # Scheme-relative redirect target.
            url = parsed_url['scheme'] + ':' + location
        else:
            url = urlparse.urljoin(url, location)
        parsed_url = parse_url(url)
        reqheaders['Host'] = parsed_url['http_host']
        reqheaders['Referer'] = response.url

        # Proxy
        scheme = parsed_url['scheme']
        proxy = proxies.get(scheme)
        # BUGFIX: the redirect path used a plain ``in PROXY_IGNORE_HOSTS``
        # test; use the same no_proxy matching as the initial request.
        if proxy and not any(match_no_proxy(parsed_url['host'], host)
                             for host in ignore_hosts):
            via_proxy = True
            if '://' not in proxy:
                proxy = '%s://%s' % (parsed_url['scheme'], proxy)
            parsed_proxy = parse_url(proxy)
            # Proxy-Authorization
            if parsed_proxy['username'] and parsed_proxy['password']:
                # BUGFIX: the original built 'username:username' here,
                # breaking proxy authentication after a redirect.
                proxyauth = '%s:%s' % (parsed_proxy['username'],
                                       parsed_proxy['password'])
                proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
                reqheaders['Proxy-Authorization'] = 'Basic ' + \
                    proxyauth.decode('utf-8')
            conn = make_connection(scheme, parsed_proxy['host'],
                                   parsed_proxy['port'], timeout,
                                   source_address)
        else:
            via_proxy = False
            reqheaders.pop('Proxy-Authorization', None)
            conn = make_connection(scheme, parsed_url['host'],
                                   parsed_url['port'], timeout,
                                   source_address)

        try:
            request_url = url if via_proxy else parsed_url['uri']
            conn.request(method, request_url, data, reqheaders)
            resp = conn.getresponse()
        except socket.timeout as e:
            raise Timeout(e)
        except Exception as e:
            raise UrlfetchException(e)
        # NOTE(review): ``total_time`` still reflects only the first hop,
        # matching the original implementation.
        response = Response.from_httplib(resp, reqheaders=reqheaders,
                                         length_limit=length_limit,
                                         history=history[:], url=url,
                                         total_time=total_time,
                                         start_time=start_time)

    return response
##############################################################################
# Shortcuts and Helpers #######################################################
##############################################################################
def _partial_method(method):
    """Build a module-level shortcut that issues *method* requests."""
    shortcut = partial(request, method=method)
    lowered = method.lower()
    shortcut.__name__ = lowered
    shortcut.__doc__ = 'Issue a %s request' % lowered
    shortcut.__module__ = request.__module__
    return shortcut
# Module-level verb shortcuts: each is request() with ``method`` pre-bound.
get = _partial_method("GET")
post = _partial_method("POST")
put = _partial_method("PUT")
delete = _partial_method("DELETE")
head = _partial_method("HEAD")
options = _partial_method("OPTIONS")
trace = _partial_method("TRACE")
patch = _partial_method("PATCH")
# Drop the factory so it does not leak into the module namespace.
del _partial_method
class ObjectDict(dict):
    """A dict whose keys can also be read and written as attributes."""

    def __getattr__(self, name):
        # Missing keys surface as AttributeError, matching object semantics.
        if name in self:
            return self[name]
        raise AttributeError(name)

    def __setattr__(self, name, value):
        self[name] = value
def parse_url(url):
    """Return a dictionary of parsed url
    Including scheme, netloc, path, params, query, fragment, uri, username,
    password, host, port and http_host
    """
    try:
        # Normalise to text early; input that cannot be decoded stays as-is.
        url = unicode(url)
    except UnicodeDecodeError:
        pass
    if py3k:
        make_utf8 = lambda x: x
    else:
        # Python 2: downgrade unicode pieces back to utf-8 byte strings.
        make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x
    if '://' in url:
        scheme, url = url.split('://', 1)
    else:
        # No scheme supplied: assume plain http.
        scheme = 'http'
        url = 'http://' + url
    parsed = urlparse.urlsplit(url)
    r = ObjectDict()
    r['scheme'] = make_utf8(scheme)
    r['netloc'] = make_utf8(parsed.netloc)
    r['path'] = make_utf8(parsed.path)
    r['query'] = make_utf8(parsed.query)
    r['fragment'] = make_utf8(parsed.fragment)
    # ``uri`` is the request target sent to the server: path plus query.
    r['uri'] = make_utf8(parsed.path)
    if parsed.query:
        r['uri'] += '?' + make_utf8(parsed.query)
    r['username'] = make_utf8(parsed.username)
    r['password'] = make_utf8(parsed.password)
    # IDNA-encode non-ASCII hostnames for the wire.
    # NOTE(review): raises AttributeError when the URL has no hostname --
    # confirm callers always pass a netloc.
    host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
    r['host'] = r['hostname'] = host
    try:
        r['port'] = parsed.port
    except ValueError:
        # Non-numeric port in the URL.
        r['port'] = None
    if r['port']:
        r['http_host'] = '%s:%d' % (r['host'], r['port'])
    else:
        r['http_host'] = r['host']
    return r
def get_proxies_from_environ():
    """Collect http/https proxy settings from the process environment."""
    proxies = {}
    for scheme in ('http', 'https'):
        # Lowercase variables take precedence over their uppercase twins.
        configured = (os.getenv(scheme + '_proxy') or
                      os.getenv(scheme.upper() + '_PROXY'))
        if configured:
            proxies[scheme] = configured
    return proxies
def mb_code(s, coding=None, errors='replace'):
    """Decode *s* by trying common encodings; optionally re-encode.

    Already-decoded text is passed through (or re-encoded to *coding*).
    Byte strings are decoded with the first encoding that succeeds; if none
    does, a permissive decode with *errors* handling is returned.
    """
    if isinstance(s, unicode):
        return s if coding is None else s.encode(coding, errors=errors)
    # BUGFIX: the original rebound ``s`` to the decoded text inside a bare
    # ``except: pass``, so a failure after a successful decode made later
    # iterations operate on corrupted state; the bare except also swallowed
    # KeyboardInterrupt/SystemExit.
    for candidate in ('utf-8', 'gb2312', 'gbk', 'gb18030', 'big5'):
        try:
            decoded = s.decode(candidate)
        except UnicodeDecodeError:
            continue
        return decoded if coding is None else decoded.encode(coding,
                                                             errors=errors)
    return unicode(s, errors=errors)
def random_useragent(filename=True):
    """Returns a User-Agent string randomly from file.
    :arg string filename: (Optional) Path to the file from which a random
        useragent is generated. By default it's ``True``, a file shipped
        with this module will be used.
    :returns: An user-agent string.
    """
    import random
    default_ua = 'urlfetch/%s' % __version__
    # Candidate files: an explicit path first, then the bundled list
    # (UAFILE, resolved at import time) as a fallback.
    if isinstance(filename, basestring):
        filenames = [filename]
    else:
        filenames = []
    if filename and UAFILE:
        filenames.append(UAFILE)
    # Pick the first candidate that is a readable regular file.
    for filename in filenames:
        try:
            st = os.stat(filename)
            if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
                break
        except:
            pass
    else:
        # No usable file: fall back to the fixed default UA.
        return default_ua
    with open(filename, 'rb') as f:
        filesize = st.st_size
        pos = 0
        r = random.Random()
        # try getting a valid line for no more than 3 times
        for i in range(3):
            # Jump to a random offset, skip the (possibly partial) current
            # line, and take the next complete one.
            pos += r.randint(0, filesize)
            pos %= filesize
            f.seek(pos)
            # in case we are in middle of a line
            f.readline()
            line = f.readline()
            if not line:
                if f.tell() == filesize:
                    # end of file: wrap around to the first line
                    f.seek(0)
                    line = f.readline()
            line = line.strip()
            # NOTE(review): the file is opened in binary mode, so on Python 3
            # ``line`` is bytes -- ``line[0] != '#'`` compares int vs str and
            # never filters comments, and a bytes UA may be returned.
            # Confirm whether this path is intended for Python 2 only.
            if line and line[0] != '#':
                return line
    return default_ua
def choose_boundary():
    """Generate a multipart boundry.
    :returns: A boundary string
    """
    global BOUNDARY_PREFIX
    if BOUNDARY_PREFIX is None:
        # Build the stable per-process prefix once: 'urlfetch[.uid][.pid]'.
        # getuid/getpid may be missing on some platforms, hence the guard.
        prefix_parts = ["urlfetch"]
        for attr in ('getuid', 'getpid'):
            getter = getattr(os, attr, None)
            if getter is not None:
                prefix_parts.append(repr(getter()))
        BOUNDARY_PREFIX = ".".join(prefix_parts)
    # A fresh UUID makes every boundary unique per call.
    return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
def encode_multipart(data, files):
    """Encode multipart.
    :arg dict data: Data to be encoded
    :arg dict files: Files to be encoded
    :returns: Tuple of (content_type, encoded binary string)
    :raises: :class:`UrlfetchException`
    """
    body = BytesIO()
    boundary = choose_boundary()
    part_boundary = b('--%s\r\n' % boundary)
    # StreamWriter class for utf-8; wraps ``body`` to accept text writes.
    writer = codecs.lookup('utf-8')[3]

    if isinstance(data, dict):
        for name, values in data.items():
            if not isinstance(values, (list, tuple, set)):
                # behave like urllib.urlencode(dict, 1)
                values = (values, )
            for value in values:
                body.write(part_boundary)
                writer(body).write('Content-Disposition: form-data; '
                                   'name="%s"\r\n' % name)
                body.write(b'Content-Type: text/plain\r\n\r\n')
                if isinstance(value, int):
                    value = str(value)
                if py3k and isinstance(value, str):
                    writer(body).write(value)
                else:
                    body.write(value)
                body.write(b'\r\n')

    for fieldname, f in files.items():
        # Resolve the filename: explicit (filename, fileobj) tuple first,
        # then the file object's own ``name``.
        if isinstance(f, tuple):
            filename, f = f
        elif hasattr(f, 'name'):
            filename = basename(f.name)
        else:
            filename = None
            # BUGFIX: fixed the grammar of the error message
            # ('file must has filename' previously).
            raise UrlfetchException("file must have a filename")
        if hasattr(f, 'read'):
            value = f.read()
        elif isinstance(f, basestring):
            value = f
        else:
            value = str(f)
        body.write(part_boundary)
        if filename:
            writer(body).write('Content-Disposition: form-data; name="%s"; '
                               'filename="%s"\r\n' % (fieldname, filename))
            body.write(b'Content-Type: application/octet-stream\r\n\r\n')
        else:
            # BUGFIX: this branch referenced ``name`` (a leftover loop
            # variable from the data loop above, or a NameError when data is
            # not a dict) instead of ``fieldname``.
            writer(body).write('Content-Disposition: form-data; name="%s"'
                               '\r\n' % fieldname)
            body.write(b'Content-Type: text/plain\r\n\r\n')
        if py3k and isinstance(value, str):
            writer(body).write(value)
        else:
            body.write(value)
        body.write(b'\r\n')

    body.write(b('--' + boundary + '--\r\n'))
    content_type = 'multipart/form-data; boundary=%s' % boundary
    return content_type, body.getvalue()
##############################################################################
# Constants and Globals #######################################################
##############################################################################
# HTTP methods accepted by request().
ALLOWED_METHODS = ("GET", "DELETE", "HEAD", "OPTIONS", "PUT", "POST", "TRACE",
                   "PATCH")
# Hosts never reached through a proxy (may be overridden by $no_proxy).
PROXY_IGNORE_HOSTS = ('127.0.0.1', 'localhost')
# Default proxy mapping, snapshotted from the environment at import time.
PROXIES = get_proxies_from_environ()
# Lazily initialised by choose_boundary() on first use.
BOUNDARY_PREFIX = None
# Bundled user-agent list for random_useragent(): the first existing file
# among the candidate install locations, or None if none is present.
UAFILENAME = 'urlfetch.useragents.list'
UAFILE = next((i for i in set((pathjoin(sys.prefix, UAFILENAME),
                               pathjoin(sys.prefix, 'local', UAFILENAME),
                               pathjoin(dirname(abspath(__file__)), UAFILENAME)))
               if os.path.isfile(i)), None)
|
ifduyue/urlfetch
|
urlfetch.py
|
choose_boundary
|
python
|
def choose_boundary():
global BOUNDARY_PREFIX
if BOUNDARY_PREFIX is None:
BOUNDARY_PREFIX = "urlfetch"
try:
uid = repr(os.getuid())
BOUNDARY_PREFIX += "." + uid
except AttributeError:
pass
try:
pid = repr(os.getpid())
BOUNDARY_PREFIX += "." + pid
except AttributeError:
pass
return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
|
Generate a multipart boundry.
:returns: A boundary string
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L957-L976
| null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
urlfetch
~~~~~~~~~~
An easy to use HTTP client based on httplib.
:copyright: (c) 2011-2019 by Yue Du.
:license: BSD 2-clause License, see LICENSE for more details.
"""
__version__ = '1.1.2'
__author__ = 'Yue Du <ifduyue@gmail.com>'
__url__ = 'https://github.com/ifduyue/urlfetch'
__license__ = 'BSD 2-Clause License'
import os, sys, base64, codecs, uuid, stat, time, socket
from os.path import basename, dirname, abspath, join as pathjoin
from functools import partial
from io import BytesIO
import re
try:
    # Prefer simplejson when installed (faster on older interpreters).
    import simplejson as json
except ImportError:
    import json
# True when running on Python 3.
py3k = sys.version_info >= (3, 0)
# The connection classes accept ``source_address`` only on 2.7+/3.2+.
support_source_address = (sys.version_info >= (2, 7) and not py3k
                          or sys.version_info >= (3, 2))
if py3k:
    from http.client import HTTPConnection, HTTPSConnection
    from urllib.parse import urlencode
    import urllib.parse as urlparse
    import http.cookies as Cookie
    # Aliases so the rest of the module stays Python 2/3 agnostic.
    basestring = (str, bytes)
    unicode = str
    b = lambda s: s.encode('latin-1')  # text -> bytes
    u = lambda s: s                    # already text on Python 3
else:
    from httplib import HTTPConnection, HTTPSConnection
    from urllib import urlencode
    import urlparse
    import Cookie
    basestring = basestring
    unicode = unicode
    b = lambda s: s                    # str is already bytes on Python 2
    u = lambda s: unicode(s, 'unicode_escape')
__all__ = ('request', 'fetch', 'Session',
'get', 'head', 'put', 'post', 'delete',
'options', 'trace', 'patch',
'UrlfetchException', 'ContentLimitExceeded', 'URLError',
'ContentDecodingError', 'TooManyRedirects')
class UrlfetchException(IOError):
    """Base exception: all urlfetch exceptions and errors subclass this."""
class ContentLimitExceeded(UrlfetchException):
    """Content length is beyond the configured ``length_limit``."""
class URLError(UrlfetchException, ValueError):
    """Error parsing or handling the URL."""
class ContentDecodingError(UrlfetchException):
    """Failed to decode the response content (bad encoding or JSON)."""
class TooManyRedirects(UrlfetchException):
    """More redirects were followed than ``max_redirects`` allows."""
class Timeout(UrlfetchException):
    """Request timed out (raised on ``socket.timeout``)."""
class cached_property(object):
    """Cached property.
    A property that is only computed once per instance and then replaces
    itself with an ordinary attribute. Deleting the attribute resets the
    property.
    """

    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self._fget = fget
        self._fset = fset
        self._fdel = fdel
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__

    def __get__(self, instance, owner):
        if instance is None:
            # Accessed through the owner class: expose the descriptor itself.
            return self
        cache = instance.__dict__
        if self.__name__ not in cache:
            cache[self.__name__] = self._fget(instance)
        return cache[self.__name__]

    def __set__(self, instance, value):
        if instance is None:
            return self
        if self._fset is not None:
            # Let the setter transform the value before caching it.
            value = self._fset(instance, value)
        instance.__dict__[self.__name__] = value

    def __delete__(self, instance):
        if instance is None:
            return self
        if self.__name__ in instance.__dict__:
            value = instance.__dict__.pop(self.__name__)
            if self._fdel is not None:
                self._fdel(instance, value)

    def setter(self, fset):
        return self.__class__(self._fget, fset, self._fdel)

    def deleter(self, fdel):
        return self.__class__(self._fget, self._fset, fdel)
##############################################################################
# Core Methods and Classes ####################################################
##############################################################################
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
def body(self):
"""Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
"""
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
def json(self):
"""Load response body as json.
:raises: :class:`ContentDecodingError`
"""
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
@cached_property
def headers(self):
"""Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
"""
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
@cached_property
def cookies(self):
"""Cookies in dict"""
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
@cached_property
def cookiestring(self):
"""Cookie string"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def links(self):
"""Links parsed from HTTP Link header"""
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
def close(self):
"""Close the connection."""
self._r.close()
def __del__(self):
self.close()
class Session(object):
"""A session object.
:class:`urlfetch.Session` can hold common headers and cookies.
Every request issued by a :class:`urlfetch.Session` object will bring u
these headers and cookies.
:class:`urlfetch.Session` plays a role in handling cookies, just like a
cookiejar.
:arg dict headers: Init headers.
:arg dict cookies: Init cookies.
:arg tuple auth: (username, password) for basic authentication.
"""
def __init__(self, headers={}, cookies={}, auth=None):
"""Init a :class:`~urlfetch.Session` object"""
#: headers
self.headers = headers.copy()
#: cookies
self.cookies = cookies.copy()
if auth and isinstance(auth, (list, tuple)):
auth = '%s:%s' % tuple(auth)
auth = base64.b64encode(auth.encode('utf-8'))
self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')
def putheader(self, header, value):
"""Add an header to default headers."""
self.headers[header] = value
def popheader(self, header):
"""Remove an header from default headers."""
return self.headers.pop(header)
def putcookie(self, key, value=""):
"""Add an cookie to default cookies."""
self.cookies[key] = value
def popcookie(self, key):
"""Remove an cookie from default cookies."""
return self.cookies.pop(key)
@property
def cookiestring(self):
"""Cookie string.
It's assignalbe, and will change :attr:`~.Session.cookies`
correspondingly.
>>> s = Session()
>>> s.cookiestring = 'foo=bar; 1=2'
>>> s.cookies
{'1': '2', 'foo': 'bar'}
"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cookiestring.setter
def cookiestring(self, value):
""""Cookie string setter"""
c = Cookie.SimpleCookie(value)
sc = [(i.key, i.value) for i in c.values()]
self.cookies = dict(sc)
def snapshot(self):
session = {
'headers': self.headers.copy(),
'cookies': self.cookies.copy()
}
return session
def request(self, *args, **kwargs):
"""Issue a request."""
headers = self.headers.copy()
if self.cookiestring:
headers['Cookie'] = self.cookiestring
headers.update(kwargs.get('headers', {}))
kwargs['headers'] = headers
r = request(*args, **kwargs)
self.cookies.update(r.cookies)
return r
def fetch(self, *args, **kwargs):
"""Fetch an URL"""
data = kwargs.get('data', None)
files = kwargs.get('files', {})
if data and isinstance(data, (basestring, dict)) or files:
return self.post(*args, **kwargs)
return self.get(*args, **kwargs)
def get(self, *args, **kwargs):
"""Issue a get request."""
kwargs['method'] = 'GET'
return self.request(*args, **kwargs)
def post(self, *args, **kwargs):
"""Issue a post request."""
kwargs['method'] = 'POST'
return self.request(*args, **kwargs)
def put(self, *args, **kwargs):
"""Issue a put request."""
kwargs['method'] = 'PUT'
return self.request(*args, **kwargs)
def delete(self, *args, **kwargs):
"""Issue a delete request."""
kwargs['method'] = 'DELETE'
return self.request(*args, **kwargs)
def head(self, *args, **kwargs):
"""Issue a head request."""
kwargs['method'] = 'HEAD'
return self.request(*args, **kwargs)
def options(self, *args, **kwargs):
"""Issue a options request."""
kwargs['method'] = 'OPTIONS'
return self.request(*args, **kwargs)
def trace(self, *args, **kwargs):
"""Issue a trace request."""
kwargs['method'] = 'TRACE'
return self.request(*args, **kwargs)
def patch(self, *args, **kwargs):
"""Issue a patch request."""
kwargs['method'] = 'PATCH'
return self.request(*args, **kwargs)
def fetch(*args, **kwargs):
"""fetch an URL.
:func:`~urlfetch.fetch` is a wrapper of :func:`~urlfetch.request`.
It calls :func:`~urlfetch.get` by default. If one of parameter ``data``
or parameter ``files`` is supplied, :func:`~urlfetch.post` is called.
"""
data = kwargs.get('data', None)
files = kwargs.get('files', {})
if data and isinstance(data, (basestring, dict)) or files:
return post(*args, **kwargs)
return get(*args, **kwargs)
def match_no_proxy(host, no_proxy):
ip_regex = r"(\d{1,3}).(\d{1,3}).(\d{1,3}).(\d{1,3})"
no_proxy_ip_regex = r"(\d{1,3}).(\d{1,3}).(\d{1,3}).(\d{1,3})(?=/(\d+))?"
ip_match = re.match(ip_regex, host)
no_proxy_ip_match = re.match(no_proxy_ip_regex, no_proxy)
if no_proxy_ip_match and ip_match:
host_bits = "".join("{:08b}".format(int(section)) for section in ip_match.group(1, 2, 3, 4))
no_proxy_bits = "".join("{:08b}".format(int(section)) for section in no_proxy_ip_match.group(1, 2, 3, 4))
if no_proxy_ip_match.group(5) is not None:
bit_match_count = int(no_proxy_ip_match.group(5))
return host_bits[:bit_match_count] == no_proxy_bits[:bit_match_count]
else:
return host_bits == no_proxy_bits
else:
return host.endswith(no_proxy)
def request(url, method="GET", params=None, data=None, headers={},
timeout=None, files={}, randua=False, auth=None, length_limit=None,
proxies=None, trust_env=True, max_redirects=0,
source_address=None, **kwargs):
"""request an URL
:arg string url: URL to be fetched.
:arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,
``PATCH``. ``GET`` is the default.
:arg dict/string params: (optional) Dict or string to attach to url as
querystring.
:arg dict headers: (optional) HTTP request headers.
:arg float timeout: (optional) Timeout in seconds
:arg files: (optional) Files to be sended
:arg randua: (optional) If ``True`` or ``path string``, use a random
user-agent in headers, instead of
``'urlfetch/' + __version__``
:arg tuple auth: (optional) (username, password) for basic authentication
:arg int length_limit: (optional) If ``None``, no limits on content length,
if the limit reached raised exception 'Content length
is more than ...'
:arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',
'https': '127.0.0.1:563'}
:arg bool trust_env: (optional) If ``True``, urlfetch will get infomations
from env, such as HTTP_PROXY, HTTPS_PROXY
:arg int max_redirects: (integer, optional) Max redirects allowed within a
request. Default is 0, which means redirects are
not allowed.
:arg tuple source_address: (optional) A tuple of (host, port) to
specify the source_address to bind to. This
argument is ignored if you're using Python prior
to 2.7/3.2.
:returns: A :class:`~urlfetch.Response` object
:raises: :class:`URLError`, :class:`UrlfetchException`,
:class:`TooManyRedirects`,
"""
def make_connection(conn_type, host, port, timeout, source_address):
"""Return HTTP or HTTPS connection."""
if support_source_address:
kwargs = {'timeout': timeout, 'source_address': source_address}
else:
kwargs = {'timeout': timeout}
if source_address is not None:
raise UrlfetchException('source_address requires'
'Python 2.7/3.2 or newer versions')
if conn_type == 'http':
conn = HTTPConnection(host, port, **kwargs)
elif conn_type == 'https':
conn = HTTPSConnection(host, port, **kwargs)
else:
raise URLError('Unknown Connection Type: %s' % conn_type)
return conn
via_proxy = False
method = method.upper()
if method not in ALLOWED_METHODS:
raise UrlfetchException("Method should be one of " +
", ".join(ALLOWED_METHODS))
if params:
if isinstance(params, dict):
url = url_concat(url, params)
elif isinstance(params, basestring):
if url[-1] not in ('?', '&'):
url += '&' if ('?' in url) else '?'
url += params
parsed_url = parse_url(url)
reqheaders = {
'Accept': '*/*',
'Accept-Encoding': 'gzip, deflate, compress, identity, *',
'User-Agent': random_useragent(randua),
'Host': parsed_url['http_host']
}
# Proxy support
scheme = parsed_url['scheme']
if proxies is None and trust_env:
proxies = PROXIES
ignore_hosts = PROXY_IGNORE_HOSTS
if trust_env:
no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')
if no_proxy:
ignore_hosts = no_proxy.split(",")
proxy = proxies.get(scheme)
if proxy and not any(match_no_proxy(parsed_url['host'], host) for host in ignore_hosts):
via_proxy = True
if '://' not in proxy:
proxy = '%s://%s' % (scheme, proxy)
parsed_proxy = parse_url(proxy)
# Proxy-Authorization
if parsed_proxy['username'] and parsed_proxy['password']:
proxyauth = '%s:%s' % (parsed_proxy['username'],
parsed_proxy['password'])
proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
reqheaders['Proxy-Authorization'] = 'Basic ' + \
proxyauth.decode('utf-8')
conn = make_connection(scheme, parsed_proxy['host'],
parsed_proxy['port'], timeout, source_address)
else:
conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],
timeout, source_address)
if not auth and parsed_url['username'] and parsed_url['password']:
auth = (parsed_url['username'], parsed_url['password'])
if auth:
if isinstance(auth, (list, tuple)):
auth = '%s:%s' % tuple(auth)
auth = base64.b64encode(auth.encode('utf-8'))
reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')
if files:
content_type, data = encode_multipart(data, files)
reqheaders['Content-Type'] = content_type
elif isinstance(data, dict):
data = urlencode(data, 1)
if isinstance(data, basestring) and not files:
# httplib will set 'Content-Length', also you can set it by yourself
reqheaders["Content-Type"] = "application/x-www-form-urlencoded"
# what if the method is GET, HEAD or DELETE
# just do not make so much decisions for users
reqheaders.update(headers)
start_time = time.time()
try:
request_url = url if via_proxy else parsed_url['uri']
conn.request(method, request_url, data, reqheaders)
resp = conn.getresponse()
except socket.timeout as e:
raise Timeout(e)
except Exception as e:
raise UrlfetchException(e)
end_time = time.time()
total_time = end_time - start_time
history = []
response = Response.from_httplib(resp, reqheaders=reqheaders,
length_limit=length_limit,
history=history[:], url=url,
total_time=total_time,
start_time=start_time)
while (response.status in (301, 302, 303, 307) and
'location' in response.headers and max_redirects):
response.body, response.close(), history.append(response)
if len(history) > max_redirects:
raise TooManyRedirects('max_redirects exceeded')
method = method if response.status == 307 else 'GET'
location = response.headers['location']
if location[:2] == '//':
url = parsed_url['scheme'] + ':' + location
else:
url = urlparse.urljoin(url, location)
parsed_url = parse_url(url)
reqheaders['Host'] = parsed_url['http_host']
reqheaders['Referer'] = response.url
# Proxy
scheme = parsed_url['scheme']
proxy = proxies.get(scheme)
if proxy and parsed_url['host'] not in PROXY_IGNORE_HOSTS:
via_proxy = True
if '://' not in proxy:
proxy = '%s://%s' % (parsed_url['scheme'], proxy)
parsed_proxy = parse_url(proxy)
# Proxy-Authorization
if parsed_proxy['username'] and parsed_proxy['password']:
proxyauth = '%s:%s' % (parsed_proxy['username'],
parsed_proxy['username'])
proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
reqheaders['Proxy-Authorization'] = 'Basic ' + \
proxyauth.decode('utf-8')
conn = make_connection(scheme, parsed_proxy['host'],
parsed_proxy['port'], timeout,
source_address)
else:
via_proxy = False
reqheaders.pop('Proxy-Authorization', None)
conn = make_connection(scheme, parsed_url['host'],
parsed_url['port'], timeout, source_address)
try:
request_url = url if via_proxy else parsed_url['uri']
conn.request(method, request_url, data, reqheaders)
resp = conn.getresponse()
except socket.timeout as e:
raise Timeout(e)
except Exception as e:
raise UrlfetchException(e)
response = Response.from_httplib(resp, reqheaders=reqheaders,
length_limit=length_limit,
history=history[:], url=url,
total_time=total_time,
start_time=start_time)
return response
##############################################################################
# Shortcuts and Helpers #######################################################
##############################################################################
def _partial_method(method):
    """Build a module-level shortcut that issues a *method* request."""
    lowered = method.lower()
    shortcut = partial(request, method=method)
    # Make the partial look like a hand-written function for introspection.
    shortcut.__doc__ = 'Issue a %s request' % lowered
    shortcut.__name__ = lowered
    shortcut.__module__ = request.__module__
    return shortcut
get = _partial_method("GET")
post = _partial_method("POST")
put = _partial_method("PUT")
delete = _partial_method("DELETE")
head = _partial_method("HEAD")
options = _partial_method("OPTIONS")
trace = _partial_method("TRACE")
patch = _partial_method("PATCH")
del _partial_method
class ObjectDict(dict):
    """A dict whose keys are also readable and writable as attributes."""

    def __getattr__(self, key):
        # Only invoked when normal attribute lookup fails; fall back to the
        # mapping and translate a missing key into AttributeError so that
        # hasattr()/getattr() behave as expected.
        if key in self:
            return self[key]
        raise AttributeError(key)

    def __setattr__(self, key, value):
        # Attribute assignment stores straight into the mapping.
        self[key] = value
def parse_url(url):
    """Return a dictionary of parsed url

    Including scheme, netloc, path, params, query, fragment, uri, username,
    password, host, port and http_host.

    :arg string url: URL to parse; a scheme-less URL is assumed to be http.
    :returns: An :class:`ObjectDict` of the URL components.
    """
    # Best-effort promotion of byte strings to text (Python 2); input that
    # cannot be decoded is left as-is.
    try:
        url = unicode(url)
    except UnicodeDecodeError:
        pass
    # On Python 3 components stay text; on Python 2 re-encode unicode
    # components back to UTF-8 byte strings for httplib.
    if py3k:
        make_utf8 = lambda x: x
    else:
        make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x
    # Default to http:// when no scheme is given so urlsplit sees a netloc.
    if '://' in url:
        scheme, url = url.split('://', 1)
    else:
        scheme = 'http'
        url = 'http://' + url
    parsed = urlparse.urlsplit(url)
    r = ObjectDict()
    r['scheme'] = make_utf8(scheme)
    r['netloc'] = make_utf8(parsed.netloc)
    r['path'] = make_utf8(parsed.path)
    r['query'] = make_utf8(parsed.query)
    r['fragment'] = make_utf8(parsed.fragment)
    # 'uri' is the request target sent to the server: path plus querystring.
    r['uri'] = make_utf8(parsed.path)
    if parsed.query:
        r['uri'] += '?' + make_utf8(parsed.query)
    r['username'] = make_utf8(parsed.username)
    r['password'] = make_utf8(parsed.password)
    # IDNA-encode the hostname so non-ASCII domain names work on the wire.
    host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
    r['host'] = r['hostname'] = host
    # urlsplit raises ValueError for a malformed port; treat it as absent.
    try:
        r['port'] = parsed.port
    except ValueError:
        r['port'] = None
    # 'http_host' is the value used for the HTTP Host header.
    if r['port']:
        r['http_host'] = '%s:%d' % (r['host'], r['port'])
    else:
        r['http_host'] = r['host']
    return r
def get_proxies_from_environ():
    """Collect proxy settings from the process environment.

    :returns: A dict mapping scheme ('http'/'https') to its proxy URL,
        omitting schemes that have no proxy configured.
    """
    env = os.getenv
    found = {}
    # Lower-case variables take precedence over their upper-case twins.
    for scheme, lower, upper in (('http', 'http_proxy', 'HTTP_PROXY'),
                                 ('https', 'https_proxy', 'HTTPS_PROXY')):
        candidate = env(lower) or env(upper)
        if candidate:
            found[scheme] = candidate
    return found
def mb_code(s, coding=None, errors='replace'):
    """Encode/decode helper for strings of unknown encoding.

    Decodes *s* by trying a list of common encodings; if *coding* is given,
    the decoded text is re-encoded to that codec.

    :arg s: A text or byte string.
    :arg string coding: (optional) Target encoding; ``None`` returns text.
    :arg string errors: (optional) Error policy passed to the codecs.
    :returns: Unicode text, or bytes when *coding* is given.
    """
    if isinstance(s, unicode):
        return s if coding is None else s.encode(coding, errors=errors)
    # Try common encodings in order of likelihood; first clean decode wins.
    for c in ('utf-8', 'gb2312', 'gbk', 'gb18030', 'big5'):
        try:
            decoded = s.decode(c)
        except (UnicodeDecodeError, LookupError):
            # Wrong guess (or codec unavailable): try the next candidate.
            # BUG FIX: this used to be a bare ``except:`` wrapping the
            # re-encode as well, which swallowed KeyboardInterrupt and
            # silently retried decoding with an already-decoded value when
            # *coding* was invalid.  Encode errors now propagate.
            continue
        return decoded if coding is None else decoded.encode(coding,
                                                             errors=errors)
    # Nothing decoded cleanly: force a lossy decode honoring *errors*.
    return unicode(s, errors=errors)
def random_useragent(filename=True):
    """Returns a User-Agent string randomly from file.

    :arg string filename: (Optional) Path to the file from which a random
        useragent is generated. By default it's ``True``, a file shipped
        with this module will be used.
    :returns: An user-agent string.
        NOTE(review): the file is opened in binary mode, so a line read
        from it is returned as bytes — confirm callers accept that.
    """
    import random
    default_ua = 'urlfetch/%s' % __version__
    # Candidate files: an explicit path first, then the bundled UAFILE.
    if isinstance(filename, basestring):
        filenames = [filename]
    else:
        filenames = []
    if filename and UAFILE:
        filenames.append(UAFILE)
    # Pick the first candidate that is a readable regular file; the loop's
    # else-clause fires only when no candidate qualifies.
    for filename in filenames:
        try:
            st = os.stat(filename)
            if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
                break
        except:
            pass
    else:
        # No usable file at all: fall back to the default UA string.
        return default_ua
    with open(filename, 'rb') as f:
        filesize = st.st_size
        pos = 0
        r = random.Random()
        # try getting a valid line for no more than 3 times
        for i in range(3):
            # Jump to a random byte offset within the file.
            pos += r.randint(0, filesize)
            pos %= filesize
            f.seek(pos)
            # in case we are in middle of a line
            f.readline()
            line = f.readline()
            if not line:
                if f.tell() == filesize:
                    # end of file
                    f.seek(0)
                    line = f.readline()
            line = line.strip()
            # Skip blank lines and '#' comments; retry on the next pass.
            if line and line[0] != '#':
                return line
    return default_ua
def url_concat(url, args, keep_existing=True):
    """Append a query-argument mapping to *url*.

    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'

    :arg string url: URL being concat to.
    :arg dict args: Args being concat.
    :arg bool keep_existing: (Optional) Whether to keep the args which are
        alreay in url, default is ``True``.
    """
    if not args:
        return url
    if keep_existing:
        # Pick the separator: none if the url already ends with one,
        # '&' if a querystring exists, otherwise start one with '?'.
        sep = '' if url[-1] in ('?', '&') else ('&' if '?' in url else '?')
        return url + sep + urlencode(args, 1)
    # Re-build the querystring from scratch, letting *args* override
    # any duplicated keys.
    base, _, query = url.partition('?')
    merged = urlparse.parse_qs(query, True)
    merged.update(args)
    return base + '?' + urlencode(merged, 1)
def encode_multipart(data, files):
    """Encode a multipart/form-data request body.

    :arg dict data: Regular form fields to be encoded.
    :arg dict files: Files to be encoded, mapping field name to a file-like
        object, a string, or a ``(filename, value)`` tuple.
    :returns: A ``(content_type, body)`` tuple, *body* being bytes.
    :raises: :class:`UrlfetchException` when a file entry carries no
        filename at all (a bare string value).
    """
    body = BytesIO()
    boundary = choose_boundary()
    part_boundary = b('--%s\r\n' % boundary)
    writer = codecs.lookup('utf-8')[3]

    def write_value(value):
        # Text goes through the UTF-8 incremental writer; bytes are raw.
        if py3k and isinstance(value, str):
            writer(body).write(value)
        else:
            body.write(value)
        body.write(b'\r\n')

    if isinstance(data, dict):
        for name, values in data.items():
            if not isinstance(values, (list, tuple, set)):
                # behave like urllib.urlencode(dict, 1)
                values = (values, )
            for value in values:
                body.write(part_boundary)
                writer(body).write('Content-Disposition: form-data; '
                                   'name="%s"\r\n' % name)
                body.write(b'Content-Type: text/plain\r\n\r\n')
                if isinstance(value, int):
                    value = str(value)
                write_value(value)
    for fieldname, f in files.items():
        if isinstance(f, tuple):
            filename, f = f
        elif hasattr(f, 'name'):
            filename = basename(f.name)
        else:
            # A bare string/bytes value has no way to carry a filename.
            # (The dead ``filename = None`` assignment before this raise
            # has been removed.)
            raise UrlfetchException("file must has filename")
        if hasattr(f, 'read'):
            value = f.read()
        elif isinstance(f, basestring):
            value = f
        else:
            value = str(f)
        body.write(part_boundary)
        if filename:
            writer(body).write('Content-Disposition: form-data; name="%s"; '
                               'filename="%s"\r\n' % (fieldname, filename))
            body.write(b'Content-Type: application/octet-stream\r\n\r\n')
        else:
            # BUG FIX: this branch used to interpolate ``name`` — a leftover
            # loop variable from the data loop above, possibly unbound —
            # instead of the file's own field name.
            writer(body).write('Content-Disposition: form-data; name="%s"'
                               '\r\n' % fieldname)
            body.write(b'Content-Type: text/plain\r\n\r\n')
        write_value(value)
    body.write(b('--' + boundary + '--\r\n'))
    content_type = 'multipart/form-data; boundary=%s' % boundary
    return content_type, body.getvalue()
##############################################################################
# Constants and Globals #######################################################
##############################################################################
ALLOWED_METHODS = ("GET", "DELETE", "HEAD", "OPTIONS", "PUT", "POST", "TRACE",
"PATCH")
PROXY_IGNORE_HOSTS = ('127.0.0.1', 'localhost')
PROXIES = get_proxies_from_environ()
BOUNDARY_PREFIX = None
UAFILENAME = 'urlfetch.useragents.list'
UAFILE = next((i for i in set((pathjoin(sys.prefix, UAFILENAME),
pathjoin(sys.prefix, 'local', UAFILENAME),
pathjoin(dirname(abspath(__file__)), UAFILENAME)))
if os.path.isfile(i)), None)
|
ifduyue/urlfetch
|
urlfetch.py
|
encode_multipart
|
python
|
def encode_multipart(data, files):
body = BytesIO()
boundary = choose_boundary()
part_boundary = b('--%s\r\n' % boundary)
writer = codecs.lookup('utf-8')[3]
if isinstance(data, dict):
for name, values in data.items():
if not isinstance(values, (list, tuple, set)):
# behave like urllib.urlencode(dict, 1)
values = (values, )
for value in values:
body.write(part_boundary)
writer(body).write('Content-Disposition: form-data; '
'name="%s"\r\n' % name)
body.write(b'Content-Type: text/plain\r\n\r\n')
if isinstance(value, int):
value = str(value)
if py3k and isinstance(value, str):
writer(body).write(value)
else:
body.write(value)
body.write(b'\r\n')
for fieldname, f in files.items():
if isinstance(f, tuple):
filename, f = f
elif hasattr(f, 'name'):
filename = basename(f.name)
else:
filename = None
raise UrlfetchException("file must has filename")
if hasattr(f, 'read'):
value = f.read()
elif isinstance(f, basestring):
value = f
else:
value = str(f)
body.write(part_boundary)
if filename:
writer(body).write('Content-Disposition: form-data; name="%s"; '
'filename="%s"\r\n' % (fieldname, filename))
body.write(b'Content-Type: application/octet-stream\r\n\r\n')
else:
writer(body).write('Content-Disposition: form-data; name="%s"'
'\r\n' % name)
body.write(b'Content-Type: text/plain\r\n\r\n')
if py3k and isinstance(value, str):
writer(body).write(value)
else:
body.write(value)
body.write(b'\r\n')
body.write(b('--' + boundary + '--\r\n'))
content_type = 'multipart/form-data; boundary=%s' % boundary
return content_type, body.getvalue()
|
Encode multipart.
:arg dict data: Data to be encoded
:arg dict files: Files to be encoded
:returns: Encoded binary string
:raises: :class:`UrlfetchException`
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L979-L1046
|
[
"b = lambda s: s.encode('latin-1')\n",
"def choose_boundary():\n \"\"\"Generate a multipart boundry.\n\n :returns: A boundary string\n \"\"\"\n global BOUNDARY_PREFIX\n if BOUNDARY_PREFIX is None:\n BOUNDARY_PREFIX = \"urlfetch\"\n try:\n uid = repr(os.getuid())\n BOUNDARY_PREFIX += \".\" + uid\n except AttributeError:\n pass\n try:\n pid = repr(os.getpid())\n BOUNDARY_PREFIX += \".\" + pid\n except AttributeError:\n pass\n\n return \"%s.%s\" % (BOUNDARY_PREFIX, uuid.uuid4().hex)\n"
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
urlfetch
~~~~~~~~~~
An easy to use HTTP client based on httplib.
:copyright: (c) 2011-2019 by Yue Du.
:license: BSD 2-clause License, see LICENSE for more details.
"""
__version__ = '1.1.2'
__author__ = 'Yue Du <ifduyue@gmail.com>'
__url__ = 'https://github.com/ifduyue/urlfetch'
__license__ = 'BSD 2-Clause License'
import os, sys, base64, codecs, uuid, stat, time, socket
from os.path import basename, dirname, abspath, join as pathjoin
from functools import partial
from io import BytesIO
import re
try:
import simplejson as json
except ImportError:
import json
py3k = sys.version_info >= (3, 0)
support_source_address = (sys.version_info >= (2, 7) and not py3k
or sys.version_info >= (3, 2))
if py3k:
from http.client import HTTPConnection, HTTPSConnection
from urllib.parse import urlencode
import urllib.parse as urlparse
import http.cookies as Cookie
basestring = (str, bytes)
unicode = str
b = lambda s: s.encode('latin-1')
u = lambda s: s
else:
from httplib import HTTPConnection, HTTPSConnection
from urllib import urlencode
import urlparse
import Cookie
basestring = basestring
unicode = unicode
b = lambda s: s
u = lambda s: unicode(s, 'unicode_escape')
__all__ = ('request', 'fetch', 'Session',
'get', 'head', 'put', 'post', 'delete',
'options', 'trace', 'patch',
'UrlfetchException', 'ContentLimitExceeded', 'URLError',
'ContentDecodingError', 'TooManyRedirects')
# Exception hierarchy: everything raised by urlfetch derives from
# UrlfetchException, which subclasses IOError so callers can treat any
# fetch failure as an I/O problem.
class UrlfetchException(IOError):
    "Base exception. All exceptions and errors will subclass from this."
class ContentLimitExceeded(UrlfetchException):
    "Content length is beyond the limit."
class URLError(UrlfetchException, ValueError):
    # Also a ValueError so generic bad-argument handlers catch it.
    "Error parsing or handling the URL."
class ContentDecodingError(UrlfetchException):
    "Failed to decode the content."
class TooManyRedirects(UrlfetchException):
    """Too many redirects."""
class Timeout(UrlfetchException):
    """Request timed out (raised when socket.timeout is caught)."""
class cached_property(object):
    """Cached property.

    A property that is only computed once per instance and then replaces
    itself with an ordinary attribute. Deleting the attribute resets the
    property.
    """
    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        self.__get = fget
        self.__set = fset
        self.__del = fdel
        # Mirror the wrapped getter's metadata (functools.wraps-style);
        # __name__ is also the key under which the value is cached in
        # instance.__dict__.
        self.__doc__ = doc or fget.__doc__
        self.__name__ = fget.__name__
        self.__module__ = fget.__module__
    def __get__(self, instance, owner):
        if instance is None:
            # attribute is accessed through the owner class
            return self
        try:
            # Serve the memoized value stored on the instance, if any.
            return instance.__dict__[self.__name__]
        except KeyError:
            # First access: compute once and cache it in the instance dict.
            value = instance.__dict__[self.__name__] = self.__get(instance)
            return value
    def __set__(self, instance, value):
        if instance is None:
            return self
        if self.__set is not None:
            # An optional setter may transform the value before caching.
            value = self.__set(instance, value)
        instance.__dict__[self.__name__] = value
    def __delete__(self, instance):
        if instance is None:
            return self
        try:
            # Dropping the cached value resets the property; recomputed on
            # the next read.
            value = instance.__dict__.pop(self.__name__)
        except KeyError:
            pass
        else:
            if self.__del is not None:
                self.__del(instance, value)
    def setter(self, fset):
        # property-style chaining: return a new descriptor with the setter.
        return self.__class__(self.__get, fset, self.__del)
    def deleter(self, fdel):
        return self.__class__(self.__get, self.__set, fdel)
##############################################################################
# Core Methods and Classes ####################################################
##############################################################################
class Response(object):
    """A Response object.

    >>> import urlfetch
    >>> response = urlfetch.get("http://docs.python.org/")
    >>> response.total_time
    0.033042049407959
    >>> response.status, response.reason, response.version
    (200, 'OK', 10)
    >>> type(response.body), len(response.body)
    (<type 'str'>, 8719)
    >>> type(response.text), len(response.text)
    (<type 'unicode'>, 8719)
    >>> response.getheader('server')
    'Apache/2.2.16 (Debian)'
    >>> response.getheaders()
    [
        ('content-length', '8719'),
        ('x-cache', 'MISS from localhost'),
        ('accept-ranges', 'bytes'),
        ('vary', 'Accept-Encoding'),
        ('server', 'Apache/2.2.16 (Debian)'),
        ('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
        ('connection', 'close'),
        ('etag', '"13cc5e4-220f-4c36507ded580"'),
        ('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
        ('content-type', 'text/html'),
        ('x-cache-lookup', 'MISS from localhost:8080')
    ]
    >>> response.headers
    {
        'content-length': '8719',
        'x-cache': 'MISS from localhost',
        'accept-ranges': 'bytes',
        'vary': 'Accept-Encoding',
        'server': 'Apache/2.2.16 (Debian)',
        'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
        'connection': 'close',
        'etag': '"13cc5e4-220f-4c36507ded580"',
        'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
        'content-type': 'text/html',
        'x-cache-lookup': 'MISS from localhost:8080'
    }

    :raises: :class:`ContentLimitExceeded`
    """
    def __init__(self, r, **kwargs):
        # Attach caller-supplied metadata (url, history, reqheaders, ...)
        # directly as attributes.
        for k in kwargs:
            setattr(self, k, kwargs[k])
        self._r = r  # httplib.HTTPResponse
        self.msg = r.msg
        #: Status code returned by server.
        self.status = r.status
        # compatible with requests
        #: An alias of :attr:`status`.
        self.status_code = r.status
        #: Reason phrase returned by server.
        self.reason = r.reason
        #: HTTP protocol version used by server.
        #: 10 for HTTP/1.0, 11 for HTTP/1.1.
        self.version = r.version
        #: total time
        self.total_time = kwargs.pop('total_time', None)
        # Delegate header access straight to the underlying response.
        self.getheader = r.getheader
        self.getheaders = r.getheaders
        self._content_encoding = self.getheader('content-encoding', None)
        # Lazily-created zlib decompressor used by __next__.
        self._decoder = None
        try:
            self.length_limit = int(kwargs.get('length_limit'))
        except:
            # None or a non-numeric limit disables the length check.
            self.length_limit = None
        # if content (length) size is more than length_limit, skip
        content_length = int(self.getheader('Content-Length', 0))
        if self.length_limit and content_length > self.length_limit:
            self.close()
            raise ContentLimitExceeded("Content length is more than %d bytes"
                                       % self.length_limit)
    def read(self, chunk_size=65536):
        """Read content (for streaming and large files)

        :arg int chunk_size: size of chunk, default is 65536.
        """
        return self._r.read(chunk_size)
    def __iter__(self):
        return self
    def __next__(self):
        # Iterating a Response yields (transparently decompressed) chunks.
        chunk = self.read()
        if not chunk:
            if self._decoder:
                # Stream exhausted: flush any bytes buffered in the
                # decompressor, then stop on the following call.
                chunk = self._decoder.flush()
                self._decoder = None
                return chunk
            else:
                raise StopIteration
        else:
            ce = self._content_encoding
            if ce in ('gzip', 'deflate'):
                if not self._decoder:
                    import zlib
                    if ce == 'gzip':
                        self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
                    else:
                        self._decoder = zlib.decompressobj()
                try:
                    return self._decoder.decompress(chunk)
                except zlib.error:
                    # Some servers send raw deflate streams; retry without
                    # the zlib header/trailer.
                    self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
                    try:
                        return self._decoder.decompress(chunk)
                    except (IOError, zlib.error) as e:
                        self.close()
                        raise ContentDecodingError(e)
            if ce:
                self.close()
                raise ContentDecodingError('Unknown encoding: %s' % ce)
            return chunk
    # Python 2 iterator protocol alias.
    next = __next__
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Close the connection; never suppress the exception.
        self.close()
        return False
    @classmethod
    def from_httplib(cls, connection, **kwargs):
        """Make an :class:`~urlfetch.Response` object from a httplib response
        object."""
        return cls(connection, **kwargs)
    @cached_property
    def body(self):
        """Response body.

        :raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
        """
        content = []
        length = 0
        # Consume the whole stream, enforcing length_limit on the fly.
        for chunk in self:
            content.append(chunk)
            length += len(chunk)
            if self.length_limit and length > self.length_limit:
                self.close()
                raise ContentLimitExceeded("Content length is more than %d "
                                           "bytes" % self.length_limit)
        return b("").join(content)
    # compatible with requests
    #: An alias of :attr:`body`.
    @property
    def content(self):
        return self.body
    @cached_property
    def text(self):
        """Response body in unicode."""
        return mb_code(self.content)
    @cached_property
    def json(self):
        """Load response body as json.

        :raises: :class:`ContentDecodingError`
        """
        try:
            return json.loads(self.text)
        except Exception as e:
            raise ContentDecodingError(e)
    @cached_property
    def headers(self):
        """Response headers.

        Response headers is a dict with all keys in lower case.

        >>> import urlfetch
        >>> response = urlfetch.get("http://docs.python.org/")
        >>> response.headers
        {
            'content-length': '8719',
            'x-cache': 'MISS from localhost',
            'accept-ranges': 'bytes',
            'vary': 'Accept-Encoding',
            'server': 'Apache/2.2.16 (Debian)',
            'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
            'connection': 'close',
            'etag': '"13cc5e4-220f-4c36507ded580"',
            'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
            'content-type': 'text/html',
            'x-cache-lookup': 'MISS from localhost:8080'
        }
        """
        if py3k:
            # http.client preserves header case; normalize to lower case.
            return dict((k.lower(), v) for k, v in self.getheaders())
        else:
            return dict(self.getheaders())
    @cached_property
    def cookies(self):
        """Cookies in dict"""
        c = Cookie.SimpleCookie(self.getheader('set-cookie'))
        return dict((i.key, i.value) for i in c.values())
    @cached_property
    def cookiestring(self):
        """Cookie string"""
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
    @cached_property
    def links(self):
        """Links parsed from HTTP Link header"""
        ret = []
        linkheader = self.getheader('link')
        if not linkheader:
            return ret
        # Each comma-separated entry looks like: <url>; rel="next"; ...
        for i in linkheader.split(','):
            try:
                url, params = i.split(';', 1)
            except ValueError:
                url, params = i, ''
            link = {}
            link['url'] = url.strip('''<> '"''')
            for param in params.split(';'):
                try:
                    k, v = param.split('=')
                except ValueError:
                    break
                link[k.strip(''' '"''')] = v.strip(''' '"''')
            ret.append(link)
        return ret
    def close(self):
        """Close the connection."""
        self._r.close()
    def __del__(self):
        # NOTE(review): assumes __init__ progressed far enough to set
        # self._r; an earlier failure would raise here — confirm.
        self.close()
class Session(object):
    """A session object.

    :class:`urlfetch.Session` can hold common headers and cookies.
    Every request issued by a :class:`urlfetch.Session` object will carry
    these headers and cookies.

    :class:`urlfetch.Session` plays a role in handling cookies, just like a
    cookiejar.

    :arg dict headers: Init headers.
    :arg dict cookies: Init cookies.
    :arg tuple auth: (username, password) for basic authentication.
    """
    def __init__(self, headers={}, cookies={}, auth=None):
        """Init a :class:`~urlfetch.Session` object"""
        # The mutable defaults are safe: both are copied, never mutated.
        #: headers
        self.headers = headers.copy()
        #: cookies
        self.cookies = cookies.copy()
        if auth and isinstance(auth, (list, tuple)):
            # Pre-compute the Basic auth header once for the whole session.
            auth = '%s:%s' % tuple(auth)
            auth = base64.b64encode(auth.encode('utf-8'))
            self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')
    def putheader(self, header, value):
        """Add a header to default headers."""
        self.headers[header] = value
    def popheader(self, header):
        """Remove a header from default headers."""
        return self.headers.pop(header)
    def putcookie(self, key, value=""):
        """Add a cookie to default cookies."""
        self.cookies[key] = value
    def popcookie(self, key):
        """Remove a cookie from default cookies."""
        return self.cookies.pop(key)
    @property
    def cookiestring(self):
        """Cookie string.

        It's assignalbe, and will change :attr:`~.Session.cookies`
        correspondingly.

        >>> s = Session()
        >>> s.cookiestring = 'foo=bar; 1=2'
        >>> s.cookies
        {'1': '2', 'foo': 'bar'}
        """
        return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
    @cookiestring.setter
    def cookiestring(self, value):
        """Cookie string setter."""
        c = Cookie.SimpleCookie(value)
        sc = [(i.key, i.value) for i in c.values()]
        self.cookies = dict(sc)
    def snapshot(self):
        """Return a copy of the session's current headers and cookies."""
        session = {
            'headers': self.headers.copy(),
            'cookies': self.cookies.copy()
        }
        return session
    def request(self, *args, **kwargs):
        """Issue a request."""
        # Session defaults first; per-call headers override them.
        headers = self.headers.copy()
        if self.cookiestring:
            headers['Cookie'] = self.cookiestring
        headers.update(kwargs.get('headers', {}))
        kwargs['headers'] = headers
        r = request(*args, **kwargs)
        # Absorb Set-Cookie values back into the session (cookiejar role).
        self.cookies.update(r.cookies)
        return r
    def fetch(self, *args, **kwargs):
        """Fetch an URL"""
        # POST when a body or files are supplied, GET otherwise.
        data = kwargs.get('data', None)
        files = kwargs.get('files', {})
        if data and isinstance(data, (basestring, dict)) or files:
            return self.post(*args, **kwargs)
        return self.get(*args, **kwargs)
    def get(self, *args, **kwargs):
        """Issue a get request."""
        kwargs['method'] = 'GET'
        return self.request(*args, **kwargs)
    def post(self, *args, **kwargs):
        """Issue a post request."""
        kwargs['method'] = 'POST'
        return self.request(*args, **kwargs)
    def put(self, *args, **kwargs):
        """Issue a put request."""
        kwargs['method'] = 'PUT'
        return self.request(*args, **kwargs)
    def delete(self, *args, **kwargs):
        """Issue a delete request."""
        kwargs['method'] = 'DELETE'
        return self.request(*args, **kwargs)
    def head(self, *args, **kwargs):
        """Issue a head request."""
        kwargs['method'] = 'HEAD'
        return self.request(*args, **kwargs)
    def options(self, *args, **kwargs):
        """Issue a options request."""
        kwargs['method'] = 'OPTIONS'
        return self.request(*args, **kwargs)
    def trace(self, *args, **kwargs):
        """Issue a trace request."""
        kwargs['method'] = 'TRACE'
        return self.request(*args, **kwargs)
    def patch(self, *args, **kwargs):
        """Issue a patch request."""
        kwargs['method'] = 'PATCH'
        return self.request(*args, **kwargs)
def fetch(*args, **kwargs):
    """Fetch an URL.

    A convenience wrapper around :func:`~urlfetch.request`: it dispatches
    to :func:`~urlfetch.post` when parameter ``data`` or ``files`` is
    supplied, and to :func:`~urlfetch.get` otherwise.
    """
    payload = kwargs.get('data', None)
    uploads = kwargs.get('files', {})
    wants_post = uploads or (payload and isinstance(payload,
                                                    (basestring, dict)))
    if wants_post:
        return post(*args, **kwargs)
    return get(*args, **kwargs)
def match_no_proxy(host, no_proxy):
    """Decide whether *host* matches a single ``no_proxy`` entry.

    The entry may be an IPv4 address, an IPv4 CIDR block (``a.b.c.d/nn``),
    or a domain suffix.

    :arg string host: Host name or IPv4 address of the request.
    :arg string no_proxy: One entry from the NO_PROXY list.
    :returns: ``True`` if the proxy should be bypassed for *host*.
    """
    # BUG FIX: the dots were previously unescaped ('.') and matched ANY
    # character, so strings like '1a2b3c4' wrongly parsed as IP addresses.
    ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})"
    no_proxy_ip_regex = r"(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})(?:/(\d+))?"
    ip_match = re.match(ip_regex, host)
    no_proxy_ip_match = re.match(no_proxy_ip_regex, no_proxy)
    if no_proxy_ip_match and ip_match:
        # Compare the two addresses as 32-bit strings; a /nn suffix limits
        # the comparison to the leading nn bits (CIDR prefix match).
        host_bits = "".join("{:08b}".format(int(section))
                            for section in ip_match.group(1, 2, 3, 4))
        no_proxy_bits = "".join("{:08b}".format(int(section))
                                for section in no_proxy_ip_match.group(1, 2, 3, 4))
        prefix_len = no_proxy_ip_match.group(5)
        if prefix_len is not None:
            bit_match_count = int(prefix_len)
            return host_bits[:bit_match_count] == no_proxy_bits[:bit_match_count]
        return host_bits == no_proxy_bits
    # Not both IPv4 addresses: fall back to a domain-suffix comparison.
    return host.endswith(no_proxy)
def request(url, method="GET", params=None, data=None, headers={},
            timeout=None, files={}, randua=False, auth=None, length_limit=None,
            proxies=None, trust_env=True, max_redirects=0,
            source_address=None, **kwargs):
    """request an URL

    :arg string url: URL to be fetched.
    :arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,
                        ``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,
                        ``PATCH``. ``GET`` is the default.
    :arg dict/string params: (optional) Dict or string to attach to url as
                             querystring.
    :arg dict headers: (optional) HTTP request headers.
    :arg float timeout: (optional) Timeout in seconds
    :arg files: (optional) Files to be sended
    :arg randua: (optional) If ``True`` or ``path string``, use a random
                 user-agent in headers, instead of
                 ``'urlfetch/' + __version__``
    :arg tuple auth: (optional) (username, password) for basic authentication
    :arg int length_limit: (optional) If ``None``, no limits on content length,
                           if the limit reached raised exception 'Content length
                           is more than ...'
    :arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',
                       'https': '127.0.0.1:563'}
    :arg bool trust_env: (optional) If ``True``, urlfetch will get infomations
                         from env, such as HTTP_PROXY, HTTPS_PROXY
    :arg int max_redirects: (integer, optional) Max redirects allowed within a
                            request. Default is 0, which means redirects are
                            not allowed.
    :arg tuple source_address: (optional) A tuple of (host, port) to
                               specify the source_address to bind to. This
                               argument is ignored if you're using Python prior
                               to 2.7/3.2.
    :returns: A :class:`~urlfetch.Response` object
    :raises: :class:`URLError`, :class:`UrlfetchException`,
             :class:`TooManyRedirects`,
    """
    def make_connection(conn_type, host, port, timeout, source_address):
        """Return HTTP or HTTPS connection."""
        if support_source_address:
            kwargs = {'timeout': timeout, 'source_address': source_address}
        else:
            kwargs = {'timeout': timeout}
            if source_address is not None:
                # BUG FIX: the two literals previously concatenated without
                # a space ("...requiresPython...").
                raise UrlfetchException('source_address requires '
                                       'Python 2.7/3.2 or newer versions')
        if conn_type == 'http':
            conn = HTTPConnection(host, port, **kwargs)
        elif conn_type == 'https':
            conn = HTTPSConnection(host, port, **kwargs)
        else:
            raise URLError('Unknown Connection Type: %s' % conn_type)
        return conn
    via_proxy = False
    method = method.upper()
    if method not in ALLOWED_METHODS:
        raise UrlfetchException("Method should be one of " +
                                ", ".join(ALLOWED_METHODS))
    # Attach querystring parameters to the URL.
    if params:
        if isinstance(params, dict):
            url = url_concat(url, params)
        elif isinstance(params, basestring):
            if url[-1] not in ('?', '&'):
                url += '&' if ('?' in url) else '?'
            url += params
    parsed_url = parse_url(url)
    # Default request headers; caller-supplied *headers* override below.
    reqheaders = {
        'Accept': '*/*',
        'Accept-Encoding': 'gzip, deflate, compress, identity, *',
        'User-Agent': random_useragent(randua),
        'Host': parsed_url['http_host']
    }
    # Proxy support
    scheme = parsed_url['scheme']
    if proxies is None and trust_env:
        proxies = PROXIES
    ignore_hosts = PROXY_IGNORE_HOSTS
    if trust_env:
        no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')
        if no_proxy:
            ignore_hosts = no_proxy.split(",")
    proxy = proxies.get(scheme)
    if proxy and not any(match_no_proxy(parsed_url['host'], host)
                         for host in ignore_hosts):
        via_proxy = True
        if '://' not in proxy:
            proxy = '%s://%s' % (scheme, proxy)
        parsed_proxy = parse_url(proxy)
        # Proxy-Authorization
        if parsed_proxy['username'] and parsed_proxy['password']:
            proxyauth = '%s:%s' % (parsed_proxy['username'],
                                   parsed_proxy['password'])
            proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
            reqheaders['Proxy-Authorization'] = 'Basic ' + \
                proxyauth.decode('utf-8')
        conn = make_connection(scheme, parsed_proxy['host'],
                               parsed_proxy['port'], timeout, source_address)
    else:
        conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],
                               timeout, source_address)
    # Basic authentication: explicit *auth* wins over URL credentials.
    if not auth and parsed_url['username'] and parsed_url['password']:
        auth = (parsed_url['username'], parsed_url['password'])
    if auth:
        if isinstance(auth, (list, tuple)):
            auth = '%s:%s' % tuple(auth)
        auth = base64.b64encode(auth.encode('utf-8'))
        reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')
    # Encode the request body.
    if files:
        content_type, data = encode_multipart(data, files)
        reqheaders['Content-Type'] = content_type
    elif isinstance(data, dict):
        data = urlencode(data, 1)
    if isinstance(data, basestring) and not files:
        # httplib will set 'Content-Length', also you can set it by yourself
        reqheaders["Content-Type"] = "application/x-www-form-urlencoded"
        # what if the method is GET, HEAD or DELETE
        # just do not make so much decisions for users
    reqheaders.update(headers)
    start_time = time.time()
    try:
        request_url = url if via_proxy else parsed_url['uri']
        conn.request(method, request_url, data, reqheaders)
        resp = conn.getresponse()
    except socket.timeout as e:
        raise Timeout(e)
    except Exception as e:
        raise UrlfetchException(e)
    end_time = time.time()
    total_time = end_time - start_time
    history = []
    response = Response.from_httplib(resp, reqheaders=reqheaders,
                                     length_limit=length_limit,
                                     history=history[:], url=url,
                                     total_time=total_time,
                                     start_time=start_time)
    # Follow redirects, at most *max_redirects* hops.
    while (response.status in (301, 302, 303, 307) and
            'location' in response.headers and max_redirects):
        # Drain and close the previous response before following the hop
        # (split from the old one-line tuple expression for clarity).
        response.body
        response.close()
        history.append(response)
        if len(history) > max_redirects:
            raise TooManyRedirects('max_redirects exceeded')
        # 307 preserves the request method; other codes degrade to GET.
        method = method if response.status == 307 else 'GET'
        location = response.headers['location']
        if location[:2] == '//':
            # Scheme-relative redirect target.
            url = parsed_url['scheme'] + ':' + location
        else:
            url = urlparse.urljoin(url, location)
        parsed_url = parse_url(url)
        reqheaders['Host'] = parsed_url['http_host']
        reqheaders['Referer'] = response.url
        # Proxy
        scheme = parsed_url['scheme']
        proxy = proxies.get(scheme)
        # CONSISTENCY FIX: honor the same no_proxy/ignore list used for the
        # initial request (previously only PROXY_IGNORE_HOSTS was checked
        # on redirects, silently bypassing the NO_PROXY environment).
        if proxy and not any(match_no_proxy(parsed_url['host'], host)
                             for host in ignore_hosts):
            via_proxy = True
            if '://' not in proxy:
                proxy = '%s://%s' % (parsed_url['scheme'], proxy)
            parsed_proxy = parse_url(proxy)
            # Proxy-Authorization
            if parsed_proxy['username'] and parsed_proxy['password']:
                # BUG FIX: this used to interpolate the username twice
                # instead of username:password, producing invalid proxy
                # credentials on redirected requests.
                proxyauth = '%s:%s' % (parsed_proxy['username'],
                                       parsed_proxy['password'])
                proxyauth = base64.b64encode(proxyauth.encode('utf-8'))
                reqheaders['Proxy-Authorization'] = 'Basic ' + \
                    proxyauth.decode('utf-8')
            conn = make_connection(scheme, parsed_proxy['host'],
                                   parsed_proxy['port'], timeout,
                                   source_address)
        else:
            via_proxy = False
            reqheaders.pop('Proxy-Authorization', None)
            conn = make_connection(scheme, parsed_url['host'],
                                   parsed_url['port'], timeout, source_address)
        try:
            request_url = url if via_proxy else parsed_url['uri']
            conn.request(method, request_url, data, reqheaders)
            resp = conn.getresponse()
        except socket.timeout as e:
            raise Timeout(e)
        except Exception as e:
            raise UrlfetchException(e)
        response = Response.from_httplib(resp, reqheaders=reqheaders,
                                         length_limit=length_limit,
                                         history=history[:], url=url,
                                         total_time=total_time,
                                         start_time=start_time)
    return response
##############################################################################
# Shortcuts and Helpers #######################################################
##############################################################################
def _partial_method(method):
func = partial(request, method=method)
func.__doc__ = 'Issue a %s request' % method.lower()
func.__name__ = method.lower()
func.__module__ = request.__module__
return func
get = _partial_method("GET")
post = _partial_method("POST")
put = _partial_method("PUT")
delete = _partial_method("DELETE")
head = _partial_method("HEAD")
options = _partial_method("OPTIONS")
trace = _partial_method("TRACE")
patch = _partial_method("PATCH")
del _partial_method
class ObjectDict(dict):
"""Makes a dictionary behave like an object."""
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
self[name] = value
def parse_url(url):
"""Return a dictionary of parsed url
Including scheme, netloc, path, params, query, fragment, uri, username,
password, host, port and http_host
"""
try:
url = unicode(url)
except UnicodeDecodeError:
pass
if py3k:
make_utf8 = lambda x: x
else:
make_utf8 = lambda x: isinstance(x, unicode) and x.encode('utf-8') or x
if '://' in url:
scheme, url = url.split('://', 1)
else:
scheme = 'http'
url = 'http://' + url
parsed = urlparse.urlsplit(url)
r = ObjectDict()
r['scheme'] = make_utf8(scheme)
r['netloc'] = make_utf8(parsed.netloc)
r['path'] = make_utf8(parsed.path)
r['query'] = make_utf8(parsed.query)
r['fragment'] = make_utf8(parsed.fragment)
r['uri'] = make_utf8(parsed.path)
if parsed.query:
r['uri'] += '?' + make_utf8(parsed.query)
r['username'] = make_utf8(parsed.username)
r['password'] = make_utf8(parsed.password)
host = make_utf8(parsed.hostname.encode('idna').decode('utf-8'))
r['host'] = r['hostname'] = host
try:
r['port'] = parsed.port
except ValueError:
r['port'] = None
if r['port']:
r['http_host'] = '%s:%d' % (r['host'], r['port'])
else:
r['http_host'] = r['host']
return r
def get_proxies_from_environ():
"""Get proxies from os.environ."""
proxies = {}
http_proxy = os.getenv('http_proxy') or os.getenv('HTTP_PROXY')
https_proxy = os.getenv('https_proxy') or os.getenv('HTTPS_PROXY')
if http_proxy:
proxies['http'] = http_proxy
if https_proxy:
proxies['https'] = https_proxy
return proxies
def mb_code(s, coding=None, errors='replace'):
"""encoding/decoding helper."""
if isinstance(s, unicode):
return s if coding is None else s.encode(coding, errors=errors)
for c in ('utf-8', 'gb2312', 'gbk', 'gb18030', 'big5'):
try:
s = s.decode(c)
return s if coding is None else s.encode(coding, errors=errors)
except:
pass
return unicode(s, errors=errors)
def random_useragent(filename=True):
    """Returns a User-Agent string randomly from file.
    :arg string filename: (Optional) Path to the file from which a random
        useragent is generated. By default it's ``True``, a file shipped
        with this module will be used.
    :returns: An user-agent string.
    """
    import random
    # Fallback UA used whenever no readable user-agent file is found.
    default_ua = 'urlfetch/%s' % __version__
    if isinstance(filename, basestring):
        filenames = [filename]
    else:
        filenames = []
    # ``filename=True`` (the default) also tries the bundled UA file.
    if filename and UAFILE:
        filenames.append(UAFILE)
    # Use the first candidate that is a readable regular file; the loop's
    # ``else`` runs only when no candidate passed the checks.
    for filename in filenames:
        try:
            st = os.stat(filename)
            if stat.S_ISREG(st.st_mode) and os.access(filename, os.R_OK):
                break
        except:
            pass
    else:
        return default_ua
    with open(filename, 'rb') as f:
        filesize = st.st_size
        pos = 0
        r = random.Random()
        # try getting a valid line for no more than 3 times
        for i in range(3):
            # Jump to a random offset within the file.
            pos += r.randint(0, filesize)
            pos %= filesize
            f.seek(pos)
            # in case we are in middle of a line
            f.readline()
            line = f.readline()
            if not line:
                if f.tell() == filesize:
                    # end of file
                    f.seek(0)
                    line = f.readline()
            line = line.strip()
            # Skip blank lines and '#' comment lines.
            # NOTE(review): the file is opened in 'rb', so on Python 3
            # ``line`` is bytes and ``line[0] != '#'`` compares int to str
            # (always true), returning a *bytes* UA -- confirm whether
            # callers expect text here.
            if line and line[0] != '#':
                return line
    return default_ua
def url_concat(url, args, keep_existing=True):
    """Concatenate url and argument dictionary

    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'

    :arg string url: URL being concat to.
    :arg dict args: Args being concat.
    :arg bool keep_existing: (Optional) Whether to keep the args which are
        already in url, default is ``True``.
    """
    if not args:
        return url
    if not keep_existing:
        # Re-parse the existing query string and let ``args`` override
        # any duplicated keys.
        base, _, query = url.partition('?')
        merged = urlparse.parse_qs(query, True)
        merged.update(args)
        return base + '?' + urlencode(merged, 1)
    # Append, picking the separator that matches the URL's current shape.
    if url[-1] in ('?', '&'):
        sep = ''
    elif '?' in url:
        sep = '&'
    else:
        sep = '?'
    return url + sep + urlencode(args, 1)
def choose_boundary():
    """Generate a multipart boundary.

    :returns: A boundary string
    """
    global BOUNDARY_PREFIX
    if BOUNDARY_PREFIX is None:
        # Build the cached prefix once: program name plus, where the
        # platform provides them, the uid and the pid.
        prefix = "urlfetch"
        for attr in ('getuid', 'getpid'):
            try:
                prefix += "." + repr(getattr(os, attr)())
            except AttributeError:
                pass
        BOUNDARY_PREFIX = prefix
    # A fresh random suffix per call keeps boundaries unique.
    return "%s.%s" % (BOUNDARY_PREFIX, uuid.uuid4().hex)
##############################################################################
# Constants and Globals #######################################################
##############################################################################
# HTTP methods this module accepts; presumably validated at request time
# -- confirm at the use site (not visible here).
ALLOWED_METHODS = ("GET", "DELETE", "HEAD", "OPTIONS", "PUT", "POST", "TRACE",
                   "PATCH")
# Hosts listed here are presumably reached directly, bypassing any proxy
# -- confirm at the use site.
PROXY_IGNORE_HOSTS = ('127.0.0.1', 'localhost')
# Proxy settings captured once from the environment at import time.
PROXIES = get_proxies_from_environ()
# Lazily initialised by choose_boundary() on first use.
BOUNDARY_PREFIX = None
# Name of the bundled user-agent list file.
UAFILENAME = 'urlfetch.useragents.list'
# First existing location of the UA file (sys.prefix, sys.prefix/local,
# or next to this module); None when the file is not installed.
UAFILE = next((i for i in set((pathjoin(sys.prefix, UAFILENAME),
               pathjoin(sys.prefix, 'local', UAFILENAME),
               pathjoin(dirname(abspath(__file__)), UAFILENAME)))
               if os.path.isfile(i)), None)
|
ifduyue/urlfetch
|
urlfetch.py
|
Response.body
|
python
|
def body(self):
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
|
Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L285-L300
|
[
"b = lambda s: s.encode('latin-1')\n",
"def close(self):\n \"\"\"Close the connection.\"\"\"\n self._r.close()\n"
] |
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
def json(self):
"""Load response body as json.
:raises: :class:`ContentDecodingError`
"""
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
@cached_property
def headers(self):
"""Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
"""
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
@cached_property
def cookies(self):
"""Cookies in dict"""
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
@cached_property
def cookiestring(self):
"""Cookie string"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def links(self):
"""Links parsed from HTTP Link header"""
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
def close(self):
"""Close the connection."""
self._r.close()
def __del__(self):
self.close()
|
ifduyue/urlfetch
|
urlfetch.py
|
Response.json
|
python
|
def json(self):
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
|
Load response body as json.
:raises: :class:`ContentDecodingError`
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L314-L322
| null |
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
def body(self):
"""Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
"""
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
@cached_property
def headers(self):
"""Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
"""
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
@cached_property
def cookies(self):
"""Cookies in dict"""
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
@cached_property
def cookiestring(self):
"""Cookie string"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def links(self):
"""Links parsed from HTTP Link header"""
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
def close(self):
"""Close the connection."""
self._r.close()
def __del__(self):
self.close()
|
ifduyue/urlfetch
|
urlfetch.py
|
Response.headers
|
python
|
def headers(self):
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
|
Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L325-L350
| null |
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
def body(self):
"""Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
"""
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
def json(self):
"""Load response body as json.
:raises: :class:`ContentDecodingError`
"""
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
@cached_property
@cached_property
def cookies(self):
"""Cookies in dict"""
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
@cached_property
def cookiestring(self):
"""Cookie string"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def links(self):
"""Links parsed from HTTP Link header"""
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
def close(self):
"""Close the connection."""
self._r.close()
def __del__(self):
self.close()
|
ifduyue/urlfetch
|
urlfetch.py
|
Response.cookies
|
python
|
def cookies(self):
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
|
Cookies in dict
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L353-L356
| null |
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
def body(self):
"""Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
"""
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
def json(self):
"""Load response body as json.
:raises: :class:`ContentDecodingError`
"""
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
@cached_property
def headers(self):
"""Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
"""
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
@cached_property
@cached_property
def cookiestring(self):
"""Cookie string"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def links(self):
"""Links parsed from HTTP Link header"""
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
def close(self):
"""Close the connection."""
self._r.close()
def __del__(self):
self.close()
|
ifduyue/urlfetch
|
urlfetch.py
|
Response.cookiestring
|
python
|
def cookiestring(self):
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
|
Cookie string
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L359-L361
| null |
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
def body(self):
"""Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
"""
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
def json(self):
"""Load response body as json.
:raises: :class:`ContentDecodingError`
"""
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
@cached_property
def headers(self):
"""Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
"""
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
@cached_property
def cookies(self):
"""Cookies in dict"""
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
@cached_property
@cached_property
def links(self):
"""Links parsed from HTTP Link header"""
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
def close(self):
"""Close the connection."""
self._r.close()
def __del__(self):
self.close()
|
ifduyue/urlfetch
|
urlfetch.py
|
Response.links
|
python
|
def links(self):
ret = []
linkheader = self.getheader('link')
if not linkheader:
return ret
for i in linkheader.split(','):
try:
url, params = i.split(';', 1)
except ValueError:
url, params = i, ''
link = {}
link['url'] = url.strip('''<> '"''')
for param in params.split(';'):
try:
k, v = param.split('=')
except ValueError:
break
link[k.strip(''' '"''')] = v.strip(''' '"''')
ret.append(link)
return ret
|
Links parsed from HTTP Link header
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L364-L384
| null |
class Response(object):
"""A Response object.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.total_time
0.033042049407959
>>> response.status, response.reason, response.version
(200, 'OK', 10)
>>> type(response.body), len(response.body)
(<type 'str'>, 8719)
>>> type(response.text), len(response.text)
(<type 'unicode'>, 8719)
>>> response.getheader('server')
'Apache/2.2.16 (Debian)'
>>> response.getheaders()
[
('content-length', '8719'),
('x-cache', 'MISS from localhost'),
('accept-ranges', 'bytes'),
('vary', 'Accept-Encoding'),
('server', 'Apache/2.2.16 (Debian)'),
('last-modified', 'Tue, 26 Jun 2012 19:23:18 GMT'),
('connection', 'close'),
('etag', '"13cc5e4-220f-4c36507ded580"'),
('date', 'Wed, 27 Jun 2012 06:50:30 GMT'),
('content-type', 'text/html'),
('x-cache-lookup', 'MISS from localhost:8080')
]
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
:raises: :class:`ContentLimitExceeded`
"""
def __init__(self, r, **kwargs):
for k in kwargs:
setattr(self, k, kwargs[k])
self._r = r # httplib.HTTPResponse
self.msg = r.msg
#: Status code returned by server.
self.status = r.status
# compatible with requests
#: An alias of :attr:`status`.
self.status_code = r.status
#: Reason phrase returned by server.
self.reason = r.reason
#: HTTP protocol version used by server.
#: 10 for HTTP/1.0, 11 for HTTP/1.1.
self.version = r.version
#: total time
self.total_time = kwargs.pop('total_time', None)
self.getheader = r.getheader
self.getheaders = r.getheaders
self._content_encoding = self.getheader('content-encoding', None)
self._decoder = None
try:
self.length_limit = int(kwargs.get('length_limit'))
except:
self.length_limit = None
# if content (length) size is more than length_limit, skip
content_length = int(self.getheader('Content-Length', 0))
if self.length_limit and content_length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d bytes"
% self.length_limit)
def read(self, chunk_size=65536):
"""Read content (for streaming and large files)
:arg int chunk_size: size of chunk, default is 8192.
"""
return self._r.read(chunk_size)
def __iter__(self):
return self
def __next__(self):
chunk = self.read()
if not chunk:
if self._decoder:
chunk = self._decoder.flush()
self._decoder = None
return chunk
else:
raise StopIteration
else:
ce = self._content_encoding
if ce in ('gzip', 'deflate'):
if not self._decoder:
import zlib
if ce == 'gzip':
self._decoder = zlib.decompressobj(16 + zlib.MAX_WBITS)
else:
self._decoder = zlib.decompressobj()
try:
return self._decoder.decompress(chunk)
except zlib.error:
self._decoder = zlib.decompressobj(-zlib.MAX_WBITS)
try:
return self._decoder.decompress(chunk)
except (IOError, zlib.error) as e:
self.close()
raise ContentDecodingError(e)
if ce:
self.close()
raise ContentDecodingError('Unknown encoding: %s' % ce)
return chunk
next = __next__
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
return False
@classmethod
def from_httplib(cls, connection, **kwargs):
"""Make an :class:`~urlfetch.Response` object from a httplib response
object."""
return cls(connection, **kwargs)
@cached_property
def body(self):
"""Response body.
:raises: :class:`ContentLimitExceeded`, :class:`ContentDecodingError`
"""
content = []
length = 0
for chunk in self:
content.append(chunk)
length += len(chunk)
if self.length_limit and length > self.length_limit:
self.close()
raise ContentLimitExceeded("Content length is more than %d "
"bytes" % self.length_limit)
return b("").join(content)
# compatible with requests
#: An alias of :attr:`body`.
@property
def content(self):
return self.body
@cached_property
def text(self):
"""Response body in unicode."""
return mb_code(self.content)
@cached_property
def json(self):
"""Load response body as json.
:raises: :class:`ContentDecodingError`
"""
try:
return json.loads(self.text)
except Exception as e:
raise ContentDecodingError(e)
@cached_property
def headers(self):
"""Response headers.
Response headers is a dict with all keys in lower case.
>>> import urlfetch
>>> response = urlfetch.get("http://docs.python.org/")
>>> response.headers
{
'content-length': '8719',
'x-cache': 'MISS from localhost',
'accept-ranges': 'bytes',
'vary': 'Accept-Encoding',
'server': 'Apache/2.2.16 (Debian)',
'last-modified': 'Tue, 26 Jun 2012 19:23:18 GMT',
'connection': 'close',
'etag': '"13cc5e4-220f-4c36507ded580"',
'date': 'Wed, 27 Jun 2012 06:50:30 GMT',
'content-type': 'text/html',
'x-cache-lookup': 'MISS from localhost:8080'
}
"""
if py3k:
return dict((k.lower(), v) for k, v in self.getheaders())
else:
return dict(self.getheaders())
@cached_property
def cookies(self):
"""Cookies in dict"""
c = Cookie.SimpleCookie(self.getheader('set-cookie'))
return dict((i.key, i.value) for i in c.values())
@cached_property
def cookiestring(self):
"""Cookie string"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cached_property
def close(self):
"""Close the connection."""
self._r.close()
def __del__(self):
self.close()
|
ifduyue/urlfetch
|
urlfetch.py
|
Session.cookiestring
|
python
|
def cookiestring(self, value):
"
c = Cookie.SimpleCookie(value)
sc = [(i.key, i.value) for i in c.values()]
self.cookies = dict(sc)
|
Cookie string setter
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L452-L456
| null |
class Session(object):
"""A session object.
:class:`urlfetch.Session` can hold common headers and cookies.
Every request issued by a :class:`urlfetch.Session` object will bring u
these headers and cookies.
:class:`urlfetch.Session` plays a role in handling cookies, just like a
cookiejar.
:arg dict headers: Init headers.
:arg dict cookies: Init cookies.
:arg tuple auth: (username, password) for basic authentication.
"""
def __init__(self, headers={}, cookies={}, auth=None):
"""Init a :class:`~urlfetch.Session` object"""
#: headers
self.headers = headers.copy()
#: cookies
self.cookies = cookies.copy()
if auth and isinstance(auth, (list, tuple)):
auth = '%s:%s' % tuple(auth)
auth = base64.b64encode(auth.encode('utf-8'))
self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')
def putheader(self, header, value):
"""Add an header to default headers."""
self.headers[header] = value
def popheader(self, header):
"""Remove an header from default headers."""
return self.headers.pop(header)
def putcookie(self, key, value=""):
"""Add an cookie to default cookies."""
self.cookies[key] = value
def popcookie(self, key):
"""Remove an cookie from default cookies."""
return self.cookies.pop(key)
@property
def cookiestring(self):
"""Cookie string.
It's assignalbe, and will change :attr:`~.Session.cookies`
correspondingly.
>>> s = Session()
>>> s.cookiestring = 'foo=bar; 1=2'
>>> s.cookies
{'1': '2', 'foo': 'bar'}
"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cookiestring.setter
def snapshot(self):
session = {
'headers': self.headers.copy(),
'cookies': self.cookies.copy()
}
return session
def request(self, *args, **kwargs):
"""Issue a request."""
headers = self.headers.copy()
if self.cookiestring:
headers['Cookie'] = self.cookiestring
headers.update(kwargs.get('headers', {}))
kwargs['headers'] = headers
r = request(*args, **kwargs)
self.cookies.update(r.cookies)
return r
def fetch(self, *args, **kwargs):
"""Fetch an URL"""
data = kwargs.get('data', None)
files = kwargs.get('files', {})
if data and isinstance(data, (basestring, dict)) or files:
return self.post(*args, **kwargs)
return self.get(*args, **kwargs)
def get(self, *args, **kwargs):
"""Issue a get request."""
kwargs['method'] = 'GET'
return self.request(*args, **kwargs)
def post(self, *args, **kwargs):
"""Issue a post request."""
kwargs['method'] = 'POST'
return self.request(*args, **kwargs)
def put(self, *args, **kwargs):
"""Issue a put request."""
kwargs['method'] = 'PUT'
return self.request(*args, **kwargs)
def delete(self, *args, **kwargs):
"""Issue a delete request."""
kwargs['method'] = 'DELETE'
return self.request(*args, **kwargs)
def head(self, *args, **kwargs):
"""Issue a head request."""
kwargs['method'] = 'HEAD'
return self.request(*args, **kwargs)
def options(self, *args, **kwargs):
"""Issue a options request."""
kwargs['method'] = 'OPTIONS'
return self.request(*args, **kwargs)
def trace(self, *args, **kwargs):
"""Issue a trace request."""
kwargs['method'] = 'TRACE'
return self.request(*args, **kwargs)
def patch(self, *args, **kwargs):
"""Issue a patch request."""
kwargs['method'] = 'PATCH'
return self.request(*args, **kwargs)
|
ifduyue/urlfetch
|
urlfetch.py
|
Session.request
|
python
|
def request(self, *args, **kwargs):
headers = self.headers.copy()
if self.cookiestring:
headers['Cookie'] = self.cookiestring
headers.update(kwargs.get('headers', {}))
kwargs['headers'] = headers
r = request(*args, **kwargs)
self.cookies.update(r.cookies)
return r
|
Issue a request.
|
train
|
https://github.com/ifduyue/urlfetch/blob/e0ea4673367c157eb832ba4ba2635306c81a61be/urlfetch.py#L465-L476
|
[
"def request(url, method=\"GET\", params=None, data=None, headers={},\n timeout=None, files={}, randua=False, auth=None, length_limit=None,\n proxies=None, trust_env=True, max_redirects=0,\n source_address=None, **kwargs):\n \"\"\"request an URL\n\n :arg string url: URL to be fetched.\n :arg string method: (optional) HTTP method, one of ``GET``, ``DELETE``,\n ``HEAD``, ``OPTIONS``, ``PUT``, ``POST``, ``TRACE``,\n ``PATCH``. ``GET`` is the default.\n :arg dict/string params: (optional) Dict or string to attach to url as\n querystring.\n :arg dict headers: (optional) HTTP request headers.\n :arg float timeout: (optional) Timeout in seconds\n :arg files: (optional) Files to be sended\n :arg randua: (optional) If ``True`` or ``path string``, use a random\n user-agent in headers, instead of\n ``'urlfetch/' + __version__``\n :arg tuple auth: (optional) (username, password) for basic authentication\n :arg int length_limit: (optional) If ``None``, no limits on content length,\n if the limit reached raised exception 'Content length\n is more than ...'\n :arg dict proxies: (optional) HTTP proxy, like {'http': '127.0.0.1:8888',\n 'https': '127.0.0.1:563'}\n :arg bool trust_env: (optional) If ``True``, urlfetch will get infomations\n from env, such as HTTP_PROXY, HTTPS_PROXY\n :arg int max_redirects: (integer, optional) Max redirects allowed within a\n request. Default is 0, which means redirects are\n not allowed.\n :arg tuple source_address: (optional) A tuple of (host, port) to\n specify the source_address to bind to. 
This\n argument is ignored if you're using Python prior\n to 2.7/3.2.\n :returns: A :class:`~urlfetch.Response` object\n :raises: :class:`URLError`, :class:`UrlfetchException`,\n :class:`TooManyRedirects`,\n \"\"\"\n def make_connection(conn_type, host, port, timeout, source_address):\n \"\"\"Return HTTP or HTTPS connection.\"\"\"\n if support_source_address:\n kwargs = {'timeout': timeout, 'source_address': source_address}\n else:\n kwargs = {'timeout': timeout}\n if source_address is not None:\n raise UrlfetchException('source_address requires'\n 'Python 2.7/3.2 or newer versions')\n if conn_type == 'http':\n conn = HTTPConnection(host, port, **kwargs)\n elif conn_type == 'https':\n conn = HTTPSConnection(host, port, **kwargs)\n else:\n raise URLError('Unknown Connection Type: %s' % conn_type)\n return conn\n\n via_proxy = False\n\n method = method.upper()\n if method not in ALLOWED_METHODS:\n raise UrlfetchException(\"Method should be one of \" +\n \", \".join(ALLOWED_METHODS))\n if params:\n if isinstance(params, dict):\n url = url_concat(url, params)\n elif isinstance(params, basestring):\n if url[-1] not in ('?', '&'):\n url += '&' if ('?' 
in url) else '?'\n url += params\n\n parsed_url = parse_url(url)\n\n reqheaders = {\n 'Accept': '*/*',\n 'Accept-Encoding': 'gzip, deflate, compress, identity, *',\n 'User-Agent': random_useragent(randua),\n 'Host': parsed_url['http_host']\n }\n\n # Proxy support\n scheme = parsed_url['scheme']\n if proxies is None and trust_env:\n proxies = PROXIES\n\n ignore_hosts = PROXY_IGNORE_HOSTS\n if trust_env:\n no_proxy = os.getenv('no_proxy') or os.getenv('NO_PROXY')\n if no_proxy:\n ignore_hosts = no_proxy.split(\",\")\n\n proxy = proxies.get(scheme)\n if proxy and not any(match_no_proxy(parsed_url['host'], host) for host in ignore_hosts):\n via_proxy = True\n if '://' not in proxy:\n proxy = '%s://%s' % (scheme, proxy)\n parsed_proxy = parse_url(proxy)\n # Proxy-Authorization\n if parsed_proxy['username'] and parsed_proxy['password']:\n proxyauth = '%s:%s' % (parsed_proxy['username'],\n parsed_proxy['password'])\n proxyauth = base64.b64encode(proxyauth.encode('utf-8'))\n reqheaders['Proxy-Authorization'] = 'Basic ' + \\\n proxyauth.decode('utf-8')\n conn = make_connection(scheme, parsed_proxy['host'],\n parsed_proxy['port'], timeout, source_address)\n else:\n conn = make_connection(scheme, parsed_url['host'], parsed_url['port'],\n timeout, source_address)\n\n if not auth and parsed_url['username'] and parsed_url['password']:\n auth = (parsed_url['username'], parsed_url['password'])\n if auth:\n if isinstance(auth, (list, tuple)):\n auth = '%s:%s' % tuple(auth)\n auth = base64.b64encode(auth.encode('utf-8'))\n reqheaders['Authorization'] = 'Basic ' + auth.decode('utf-8')\n\n if files:\n content_type, data = encode_multipart(data, files)\n reqheaders['Content-Type'] = content_type\n elif isinstance(data, dict):\n data = urlencode(data, 1)\n\n if isinstance(data, basestring) and not files:\n # httplib will set 'Content-Length', also you can set it by yourself\n reqheaders[\"Content-Type\"] = \"application/x-www-form-urlencoded\"\n # what if the method is GET, HEAD or 
DELETE\n # just do not make so much decisions for users\n\n reqheaders.update(headers)\n\n start_time = time.time()\n try:\n request_url = url if via_proxy else parsed_url['uri']\n conn.request(method, request_url, data, reqheaders)\n resp = conn.getresponse()\n except socket.timeout as e:\n raise Timeout(e)\n except Exception as e:\n raise UrlfetchException(e)\n\n end_time = time.time()\n total_time = end_time - start_time\n history = []\n response = Response.from_httplib(resp, reqheaders=reqheaders,\n length_limit=length_limit,\n history=history[:], url=url,\n total_time=total_time,\n start_time=start_time)\n\n while (response.status in (301, 302, 303, 307) and\n 'location' in response.headers and max_redirects):\n response.body, response.close(), history.append(response)\n\n if len(history) > max_redirects:\n raise TooManyRedirects('max_redirects exceeded')\n\n method = method if response.status == 307 else 'GET'\n location = response.headers['location']\n if location[:2] == '//':\n url = parsed_url['scheme'] + ':' + location\n else:\n url = urlparse.urljoin(url, location)\n parsed_url = parse_url(url)\n\n reqheaders['Host'] = parsed_url['http_host']\n reqheaders['Referer'] = response.url\n\n # Proxy\n scheme = parsed_url['scheme']\n proxy = proxies.get(scheme)\n if proxy and parsed_url['host'] not in PROXY_IGNORE_HOSTS:\n via_proxy = True\n if '://' not in proxy:\n proxy = '%s://%s' % (parsed_url['scheme'], proxy)\n parsed_proxy = parse_url(proxy)\n # Proxy-Authorization\n if parsed_proxy['username'] and parsed_proxy['password']:\n proxyauth = '%s:%s' % (parsed_proxy['username'],\n parsed_proxy['username'])\n proxyauth = base64.b64encode(proxyauth.encode('utf-8'))\n reqheaders['Proxy-Authorization'] = 'Basic ' + \\\n proxyauth.decode('utf-8')\n conn = make_connection(scheme, parsed_proxy['host'],\n parsed_proxy['port'], timeout,\n source_address)\n else:\n via_proxy = False\n reqheaders.pop('Proxy-Authorization', None)\n conn = make_connection(scheme, 
parsed_url['host'],\n parsed_url['port'], timeout, source_address)\n\n try:\n request_url = url if via_proxy else parsed_url['uri']\n conn.request(method, request_url, data, reqheaders)\n resp = conn.getresponse()\n except socket.timeout as e:\n raise Timeout(e)\n except Exception as e:\n raise UrlfetchException(e)\n\n response = Response.from_httplib(resp, reqheaders=reqheaders,\n length_limit=length_limit,\n history=history[:], url=url,\n total_time=total_time,\n start_time=start_time)\n\n return response\n"
] |
class Session(object):
"""A session object.
:class:`urlfetch.Session` can hold common headers and cookies.
Every request issued by a :class:`urlfetch.Session` object will bring u
these headers and cookies.
:class:`urlfetch.Session` plays a role in handling cookies, just like a
cookiejar.
:arg dict headers: Init headers.
:arg dict cookies: Init cookies.
:arg tuple auth: (username, password) for basic authentication.
"""
def __init__(self, headers={}, cookies={}, auth=None):
"""Init a :class:`~urlfetch.Session` object"""
#: headers
self.headers = headers.copy()
#: cookies
self.cookies = cookies.copy()
if auth and isinstance(auth, (list, tuple)):
auth = '%s:%s' % tuple(auth)
auth = base64.b64encode(auth.encode('utf-8'))
self.headers['Authorization'] = 'Basic ' + auth.decode('utf-8')
def putheader(self, header, value):
"""Add an header to default headers."""
self.headers[header] = value
def popheader(self, header):
"""Remove an header from default headers."""
return self.headers.pop(header)
def putcookie(self, key, value=""):
"""Add an cookie to default cookies."""
self.cookies[key] = value
def popcookie(self, key):
"""Remove an cookie from default cookies."""
return self.cookies.pop(key)
@property
def cookiestring(self):
"""Cookie string.
It's assignalbe, and will change :attr:`~.Session.cookies`
correspondingly.
>>> s = Session()
>>> s.cookiestring = 'foo=bar; 1=2'
>>> s.cookies
{'1': '2', 'foo': 'bar'}
"""
return '; '.join('%s=%s' % (k, v) for k, v in self.cookies.items())
@cookiestring.setter
def cookiestring(self, value):
""""Cookie string setter"""
c = Cookie.SimpleCookie(value)
sc = [(i.key, i.value) for i in c.values()]
self.cookies = dict(sc)
def snapshot(self):
session = {
'headers': self.headers.copy(),
'cookies': self.cookies.copy()
}
return session
def fetch(self, *args, **kwargs):
"""Fetch an URL"""
data = kwargs.get('data', None)
files = kwargs.get('files', {})
if data and isinstance(data, (basestring, dict)) or files:
return self.post(*args, **kwargs)
return self.get(*args, **kwargs)
def get(self, *args, **kwargs):
"""Issue a get request."""
kwargs['method'] = 'GET'
return self.request(*args, **kwargs)
def post(self, *args, **kwargs):
"""Issue a post request."""
kwargs['method'] = 'POST'
return self.request(*args, **kwargs)
def put(self, *args, **kwargs):
"""Issue a put request."""
kwargs['method'] = 'PUT'
return self.request(*args, **kwargs)
def delete(self, *args, **kwargs):
"""Issue a delete request."""
kwargs['method'] = 'DELETE'
return self.request(*args, **kwargs)
def head(self, *args, **kwargs):
"""Issue a head request."""
kwargs['method'] = 'HEAD'
return self.request(*args, **kwargs)
def options(self, *args, **kwargs):
"""Issue a options request."""
kwargs['method'] = 'OPTIONS'
return self.request(*args, **kwargs)
def trace(self, *args, **kwargs):
"""Issue a trace request."""
kwargs['method'] = 'TRACE'
return self.request(*args, **kwargs)
def patch(self, *args, **kwargs):
"""Issue a patch request."""
kwargs['method'] = 'PATCH'
return self.request(*args, **kwargs)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/navbar.py
|
NavBar.is_tracking_shield_displayed
|
python
|
def is_tracking_shield_displayed(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
if self.window.firefox_version >= 63: # Bug 1471713, 1476218
el = self.root.find_element(*self._tracking_protection_shield_locator)
return el.get_attribute("active") is not None
el = self.root.find_element(By.ID, "tracking-protection-icon")
return bool(el.get_attribute("state"))
|
Tracking Protection shield.
Returns:
bool: True or False if the Tracking Shield is displayed.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/navbar.py#L26-L38
| null |
class NavBar(Region):
"""Representation of the navigation bar.
Args:
window (:py:class:`BaseWindow`): Window object this region appears in.
root
(:py:class:`~selenium.webdriver.remote.webelement.WebElement`):
WebDriver element object that serves as the root for the
region.
"""
_tracking_protection_shield_locator = (By.ID, "tracking-protection-icon-box")
@property
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/addons.py
|
AddOnInstallBlocked.allow
|
python
|
def allow(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_primary_button().click()
|
Allow the add-on to be installed.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/addons.py#L14-L17
|
[
"def find_primary_button(self):\n \"\"\"Retrieve the primary button.\"\"\"\n if self.window.firefox_version >= 67:\n return self.root.find_element(\n By.CLASS_NAME, \"popup-notification-primary-button\")\n return self.root.find_anonymous_element_by_attribute(\n \"anonid\", \"button\")\n"
] |
class AddOnInstallBlocked(BaseNotification):
"""Add-on install blocked notification."""
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/addons.py
|
AddOnInstallConfirmation.addon_name
|
python
|
def addon_name(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
el = self.find_description()
return el.find_element(By.CSS_SELECTOR, "b").text
|
Provide access to the add-on name.
Returns:
str: Add-on name.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/addons.py#L24-L33
|
[
"def find_description(self):\n \"\"\"Retrieve the notification description.\"\"\"\n if self.window.firefox_version >= 67:\n return self.root.find_element(\n By.CLASS_NAME, \"popup-notification-description\")\n return self.root.find_anonymous_element_by_attribute(\n \"class\", \"popup-notification-description\")\n"
] |
class AddOnInstallConfirmation(BaseNotification):
"""Add-on install confirmation notification."""
@property
def cancel(self):
"""Cancel add-on install."""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_secondary_button().click()
def install(self):
"""Confirm add-on install."""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_primary_button().click()
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/addons.py
|
AddOnInstallConfirmation.cancel
|
python
|
def cancel(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_secondary_button().click()
|
Cancel add-on install.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/addons.py#L35-L38
|
[
"def find_secondary_button(self):\n \"\"\"Retrieve the secondary button.\"\"\"\n if self.window.firefox_version >= 67:\n return self.root.find_element(\n By.CLASS_NAME, \"popup-notification-secondary-button\")\n return self.root.find_anonymous_element_by_attribute(\n \"anonid\", \"secondarybutton\")\n"
] |
class AddOnInstallConfirmation(BaseNotification):
"""Add-on install confirmation notification."""
@property
def addon_name(self):
"""Provide access to the add-on name.
Returns:
str: Add-on name.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
el = self.find_description()
return el.find_element(By.CSS_SELECTOR, "b").text
def install(self):
"""Confirm add-on install."""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_primary_button().click()
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/addons.py
|
AddOnInstallConfirmation.install
|
python
|
def install(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_primary_button().click()
|
Confirm add-on install.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/addons.py#L40-L43
|
[
"def find_primary_button(self):\n \"\"\"Retrieve the primary button.\"\"\"\n if self.window.firefox_version >= 67:\n return self.root.find_element(\n By.CLASS_NAME, \"popup-notification-primary-button\")\n return self.root.find_anonymous_element_by_attribute(\n \"anonid\", \"button\")\n"
] |
class AddOnInstallConfirmation(BaseNotification):
"""Add-on install confirmation notification."""
@property
def addon_name(self):
"""Provide access to the add-on name.
Returns:
str: Add-on name.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
el = self.find_description()
return el.find_element(By.CSS_SELECTOR, "b").text
def cancel(self):
"""Cancel add-on install."""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_secondary_button().click()
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/addons.py
|
AddOnInstallComplete.close
|
python
|
def close(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
if self.window.firefox_version > 63:
self.find_primary_button().click()
self.window.wait_for_notification(None)
else:
BaseNotification.close(self)
|
Close the notification.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/addons.py#L49-L56
|
[
"def find_primary_button(self):\n \"\"\"Retrieve the primary button.\"\"\"\n if self.window.firefox_version >= 67:\n return self.root.find_element(\n By.CLASS_NAME, \"popup-notification-primary-button\")\n return self.root.find_anonymous_element_by_attribute(\n \"anonid\", \"button\")\n",
"def close(self):\n \"\"\"Close the notification.\"\"\"\n with self.selenium.context(self.selenium.CONTEXT_CHROME):\n self.find_close_button().click()\n self.window.wait_for_notification(None)\n"
] |
class AddOnInstallComplete(BaseNotification):
"""Add-on install complete notification."""
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/window.py
|
BrowserWindow.navbar
|
python
|
def navbar(self):
window = BaseWindow(self.selenium, self.selenium.current_window_handle)
with self.selenium.context(self.selenium.CONTEXT_CHROME):
el = self.selenium.find_element(*self._nav_bar_locator)
return NavBar(window, el)
|
Provide access to the Navigation Bar.
Returns:
:py:class:`NavBar`: FoxPuppet NavBar object.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/window.py#L30-L40
| null |
class BrowserWindow(BaseWindow):
"""Representation of a browser window."""
_file_menu_button_locator = (By.ID, "file-menu")
_file_menu_private_window_locator = (By.ID, "menu_newPrivateWindow")
_file_menu_new_window_button_locator = (By.ID, "menu_newNavigator")
_nav_bar_locator = (By.ID, "nav-bar")
_notification_locator = (By.CSS_SELECTOR, "#notification-popup popupnotification")
_app_menu_notification_locator = (
By.CSS_SELECTOR,
"#appMenu-notification-popup popupnotification",
)
_tab_browser_locator = (By.ID, "tabbrowser-tabs")
@property
@property
def notification(self):
"""Provide access to the currently displayed notification.
Returns:
:py:class:`BaseNotification`: FoxPuppet BaseNotification object.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
try:
root = self.selenium.find_element(*self._notification_locator)
return BaseNotification.create(self, root)
except NoSuchElementException:
pass
try:
notifications = self.selenium.find_elements(
*self._app_menu_notification_locator
)
root = next(n for n in notifications if n.is_displayed())
return BaseNotification.create(self, root)
except StopIteration:
pass
return None # no notification is displayed
def wait_for_notification(self, notification_class=BaseNotification):
"""Wait for the specified notification to be displayed.
Args:
notification_class (:py:class:`BaseNotification`, optional):
The notification class to wait for. If `None` is specified it
will wait for any notification to be closed. Defaults to
`BaseNotification`.
Returns:
:py:class:`BaseNotification`: Firefox notification.
"""
if notification_class:
if notification_class is BaseNotification:
message = "No notification was shown."
else:
message = "{0} was not shown.".format(notification_class.__name__)
self.wait.until(
lambda _: isinstance(self.notification, notification_class),
message=message,
)
return self.notification
else:
self.wait.until(
lambda _: self.notification is None,
message="Unexpected notification shown.",
)
@property
def is_private(self):
"""Property that checks if the specified window is private or not.
Returns:
bool: True if this is a Private Browsing window.
"""
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.selenium.execute_script(
"""
Components.utils.import("resource://gre/modules/PrivateBrowsingUtils.jsm");
let chromeWindow = arguments[0].ownerDocument.defaultView;
return PrivateBrowsingUtils.isWindowPrivate(chromeWindow);
""",
self.document_element,
)
def open_window(self, private=False):
"""Open a new browser window.
Args:
private (bool): Optional parameter to open a private browsing
window. Defaults to False.
Returns:
:py:class:`BrowserWindow`: Opened window.
"""
handles_before = self.selenium.window_handles
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
# Opens private or non-private window
self.selenium.find_element(*self._file_menu_button_locator).click()
if private:
self.selenium.find_element(
*self._file_menu_private_window_locator
).click()
else:
self.selenium.find_element(
*self._file_menu_new_window_button_locator
).click()
return self.wait.until(
expected.new_browser_window_is_opened(self.selenium, handles_before),
message="No new browser window opened",
)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/window.py
|
BrowserWindow.notification
|
python
|
def notification(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
try:
root = self.selenium.find_element(*self._notification_locator)
return BaseNotification.create(self, root)
except NoSuchElementException:
pass
try:
notifications = self.selenium.find_elements(
*self._app_menu_notification_locator
)
root = next(n for n in notifications if n.is_displayed())
return BaseNotification.create(self, root)
except StopIteration:
pass
return None
|
Provide access to the currently displayed notification.
Returns:
:py:class:`BaseNotification`: FoxPuppet BaseNotification object.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/window.py#L43-L64
|
[
"def create(window, root):\n \"\"\"Create a notification object.\n\n Args:\n window (:py:class:`BrowserWindow`): Window object this region\n appears in.\n root\n (:py:class:`~selenium.webdriver.remote.webelement.WebElement`):\n WebDriver element object that serves as the root for the\n notification.\n\n Returns:\n :py:class:`BaseNotification`: Firefox notification.\n\n \"\"\"\n notifications = {}\n _id = root.get_property(\"id\")\n from foxpuppet.windows.browser.notifications import addons\n\n notifications.update(addons.NOTIFICATIONS)\n return notifications.get(_id, BaseNotification)(window, root)\n"
] |
class BrowserWindow(BaseWindow):
"""Representation of a browser window."""
_file_menu_button_locator = (By.ID, "file-menu")
_file_menu_private_window_locator = (By.ID, "menu_newPrivateWindow")
_file_menu_new_window_button_locator = (By.ID, "menu_newNavigator")
_nav_bar_locator = (By.ID, "nav-bar")
_notification_locator = (By.CSS_SELECTOR, "#notification-popup popupnotification")
_app_menu_notification_locator = (
By.CSS_SELECTOR,
"#appMenu-notification-popup popupnotification",
)
_tab_browser_locator = (By.ID, "tabbrowser-tabs")
@property
def navbar(self):
"""Provide access to the Navigation Bar.
Returns:
:py:class:`NavBar`: FoxPuppet NavBar object.
"""
window = BaseWindow(self.selenium, self.selenium.current_window_handle)
with self.selenium.context(self.selenium.CONTEXT_CHROME):
el = self.selenium.find_element(*self._nav_bar_locator)
return NavBar(window, el)
@property
# no notification is displayed
def wait_for_notification(self, notification_class=BaseNotification):
"""Wait for the specified notification to be displayed.
Args:
notification_class (:py:class:`BaseNotification`, optional):
The notification class to wait for. If `None` is specified it
will wait for any notification to be closed. Defaults to
`BaseNotification`.
Returns:
:py:class:`BaseNotification`: Firefox notification.
"""
if notification_class:
if notification_class is BaseNotification:
message = "No notification was shown."
else:
message = "{0} was not shown.".format(notification_class.__name__)
self.wait.until(
lambda _: isinstance(self.notification, notification_class),
message=message,
)
return self.notification
else:
self.wait.until(
lambda _: self.notification is None,
message="Unexpected notification shown.",
)
@property
def is_private(self):
"""Property that checks if the specified window is private or not.
Returns:
bool: True if this is a Private Browsing window.
"""
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.selenium.execute_script(
"""
Components.utils.import("resource://gre/modules/PrivateBrowsingUtils.jsm");
let chromeWindow = arguments[0].ownerDocument.defaultView;
return PrivateBrowsingUtils.isWindowPrivate(chromeWindow);
""",
self.document_element,
)
def open_window(self, private=False):
"""Open a new browser window.
Args:
private (bool): Optional parameter to open a private browsing
window. Defaults to False.
Returns:
:py:class:`BrowserWindow`: Opened window.
"""
handles_before = self.selenium.window_handles
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
# Opens private or non-private window
self.selenium.find_element(*self._file_menu_button_locator).click()
if private:
self.selenium.find_element(
*self._file_menu_private_window_locator
).click()
else:
self.selenium.find_element(
*self._file_menu_new_window_button_locator
).click()
return self.wait.until(
expected.new_browser_window_is_opened(self.selenium, handles_before),
message="No new browser window opened",
)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/window.py
|
BrowserWindow.wait_for_notification
|
python
|
def wait_for_notification(self, notification_class=BaseNotification):
if notification_class:
if notification_class is BaseNotification:
message = "No notification was shown."
else:
message = "{0} was not shown.".format(notification_class.__name__)
self.wait.until(
lambda _: isinstance(self.notification, notification_class),
message=message,
)
return self.notification
else:
self.wait.until(
lambda _: self.notification is None,
message="Unexpected notification shown.",
)
|
Wait for the specified notification to be displayed.
Args:
notification_class (:py:class:`BaseNotification`, optional):
The notification class to wait for. If `None` is specified it
will wait for any notification to be closed. Defaults to
`BaseNotification`.
Returns:
:py:class:`BaseNotification`: Firefox notification.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/window.py#L66-L93
| null |
class BrowserWindow(BaseWindow):
"""Representation of a browser window."""
_file_menu_button_locator = (By.ID, "file-menu")
_file_menu_private_window_locator = (By.ID, "menu_newPrivateWindow")
_file_menu_new_window_button_locator = (By.ID, "menu_newNavigator")
_nav_bar_locator = (By.ID, "nav-bar")
_notification_locator = (By.CSS_SELECTOR, "#notification-popup popupnotification")
_app_menu_notification_locator = (
By.CSS_SELECTOR,
"#appMenu-notification-popup popupnotification",
)
_tab_browser_locator = (By.ID, "tabbrowser-tabs")
@property
def navbar(self):
"""Provide access to the Navigation Bar.
Returns:
:py:class:`NavBar`: FoxPuppet NavBar object.
"""
window = BaseWindow(self.selenium, self.selenium.current_window_handle)
with self.selenium.context(self.selenium.CONTEXT_CHROME):
el = self.selenium.find_element(*self._nav_bar_locator)
return NavBar(window, el)
@property
def notification(self):
"""Provide access to the currently displayed notification.
Returns:
:py:class:`BaseNotification`: FoxPuppet BaseNotification object.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
try:
root = self.selenium.find_element(*self._notification_locator)
return BaseNotification.create(self, root)
except NoSuchElementException:
pass
try:
notifications = self.selenium.find_elements(
*self._app_menu_notification_locator
)
root = next(n for n in notifications if n.is_displayed())
return BaseNotification.create(self, root)
except StopIteration:
pass
return None # no notification is displayed
@property
def is_private(self):
"""Property that checks if the specified window is private or not.
Returns:
bool: True if this is a Private Browsing window.
"""
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.selenium.execute_script(
"""
Components.utils.import("resource://gre/modules/PrivateBrowsingUtils.jsm");
let chromeWindow = arguments[0].ownerDocument.defaultView;
return PrivateBrowsingUtils.isWindowPrivate(chromeWindow);
""",
self.document_element,
)
def open_window(self, private=False):
"""Open a new browser window.
Args:
private (bool): Optional parameter to open a private browsing
window. Defaults to False.
Returns:
:py:class:`BrowserWindow`: Opened window.
"""
handles_before = self.selenium.window_handles
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
# Opens private or non-private window
self.selenium.find_element(*self._file_menu_button_locator).click()
if private:
self.selenium.find_element(
*self._file_menu_private_window_locator
).click()
else:
self.selenium.find_element(
*self._file_menu_new_window_button_locator
).click()
return self.wait.until(
expected.new_browser_window_is_opened(self.selenium, handles_before),
message="No new browser window opened",
)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/window.py
|
BrowserWindow.is_private
|
python
|
def is_private(self):
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.selenium.execute_script(
"""
Components.utils.import("resource://gre/modules/PrivateBrowsingUtils.jsm");
let chromeWindow = arguments[0].ownerDocument.defaultView;
return PrivateBrowsingUtils.isWindowPrivate(chromeWindow);
""",
self.document_element,
)
|
Property that checks if the specified window is private or not.
Returns:
bool: True if this is a Private Browsing window.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/window.py#L96-L113
|
[
"def switch_to(self):\n \"\"\"Switch focus for Selenium commands to this window.\"\"\"\n self.selenium.switch_to.window(self.handle)\n"
] |
class BrowserWindow(BaseWindow):
"""Representation of a browser window."""
_file_menu_button_locator = (By.ID, "file-menu")
_file_menu_private_window_locator = (By.ID, "menu_newPrivateWindow")
_file_menu_new_window_button_locator = (By.ID, "menu_newNavigator")
_nav_bar_locator = (By.ID, "nav-bar")
_notification_locator = (By.CSS_SELECTOR, "#notification-popup popupnotification")
_app_menu_notification_locator = (
By.CSS_SELECTOR,
"#appMenu-notification-popup popupnotification",
)
_tab_browser_locator = (By.ID, "tabbrowser-tabs")
@property
def navbar(self):
"""Provide access to the Navigation Bar.
Returns:
:py:class:`NavBar`: FoxPuppet NavBar object.
"""
window = BaseWindow(self.selenium, self.selenium.current_window_handle)
with self.selenium.context(self.selenium.CONTEXT_CHROME):
el = self.selenium.find_element(*self._nav_bar_locator)
return NavBar(window, el)
@property
def notification(self):
"""Provide access to the currently displayed notification.
Returns:
:py:class:`BaseNotification`: FoxPuppet BaseNotification object.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
try:
root = self.selenium.find_element(*self._notification_locator)
return BaseNotification.create(self, root)
except NoSuchElementException:
pass
try:
notifications = self.selenium.find_elements(
*self._app_menu_notification_locator
)
root = next(n for n in notifications if n.is_displayed())
return BaseNotification.create(self, root)
except StopIteration:
pass
return None # no notification is displayed
def wait_for_notification(self, notification_class=BaseNotification):
"""Wait for the specified notification to be displayed.
Args:
notification_class (:py:class:`BaseNotification`, optional):
The notification class to wait for. If `None` is specified it
will wait for any notification to be closed. Defaults to
`BaseNotification`.
Returns:
:py:class:`BaseNotification`: Firefox notification.
"""
if notification_class:
if notification_class is BaseNotification:
message = "No notification was shown."
else:
message = "{0} was not shown.".format(notification_class.__name__)
self.wait.until(
lambda _: isinstance(self.notification, notification_class),
message=message,
)
return self.notification
else:
self.wait.until(
lambda _: self.notification is None,
message="Unexpected notification shown.",
)
@property
def open_window(self, private=False):
"""Open a new browser window.
Args:
private (bool): Optional parameter to open a private browsing
window. Defaults to False.
Returns:
:py:class:`BrowserWindow`: Opened window.
"""
handles_before = self.selenium.window_handles
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
# Opens private or non-private window
self.selenium.find_element(*self._file_menu_button_locator).click()
if private:
self.selenium.find_element(
*self._file_menu_private_window_locator
).click()
else:
self.selenium.find_element(
*self._file_menu_new_window_button_locator
).click()
return self.wait.until(
expected.new_browser_window_is_opened(self.selenium, handles_before),
message="No new browser window opened",
)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/window.py
|
BrowserWindow.open_window
|
python
|
def open_window(self, private=False):
handles_before = self.selenium.window_handles
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
# Opens private or non-private window
self.selenium.find_element(*self._file_menu_button_locator).click()
if private:
self.selenium.find_element(
*self._file_menu_private_window_locator
).click()
else:
self.selenium.find_element(
*self._file_menu_new_window_button_locator
).click()
return self.wait.until(
expected.new_browser_window_is_opened(self.selenium, handles_before),
message="No new browser window opened",
)
|
Open a new browser window.
Args:
private (bool): Optional parameter to open a private browsing
window. Defaults to False.
Returns:
:py:class:`BrowserWindow`: Opened window.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/window.py#L115-L144
|
[
"def switch_to(self):\n \"\"\"Switch focus for Selenium commands to this window.\"\"\"\n self.selenium.switch_to.window(self.handle)\n"
] |
class BrowserWindow(BaseWindow):
"""Representation of a browser window."""
_file_menu_button_locator = (By.ID, "file-menu")
_file_menu_private_window_locator = (By.ID, "menu_newPrivateWindow")
_file_menu_new_window_button_locator = (By.ID, "menu_newNavigator")
_nav_bar_locator = (By.ID, "nav-bar")
_notification_locator = (By.CSS_SELECTOR, "#notification-popup popupnotification")
_app_menu_notification_locator = (
By.CSS_SELECTOR,
"#appMenu-notification-popup popupnotification",
)
_tab_browser_locator = (By.ID, "tabbrowser-tabs")
@property
def navbar(self):
"""Provide access to the Navigation Bar.
Returns:
:py:class:`NavBar`: FoxPuppet NavBar object.
"""
window = BaseWindow(self.selenium, self.selenium.current_window_handle)
with self.selenium.context(self.selenium.CONTEXT_CHROME):
el = self.selenium.find_element(*self._nav_bar_locator)
return NavBar(window, el)
@property
def notification(self):
"""Provide access to the currently displayed notification.
Returns:
:py:class:`BaseNotification`: FoxPuppet BaseNotification object.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
try:
root = self.selenium.find_element(*self._notification_locator)
return BaseNotification.create(self, root)
except NoSuchElementException:
pass
try:
notifications = self.selenium.find_elements(
*self._app_menu_notification_locator
)
root = next(n for n in notifications if n.is_displayed())
return BaseNotification.create(self, root)
except StopIteration:
pass
return None # no notification is displayed
def wait_for_notification(self, notification_class=BaseNotification):
"""Wait for the specified notification to be displayed.
Args:
notification_class (:py:class:`BaseNotification`, optional):
The notification class to wait for. If `None` is specified it
will wait for any notification to be closed. Defaults to
`BaseNotification`.
Returns:
:py:class:`BaseNotification`: Firefox notification.
"""
if notification_class:
if notification_class is BaseNotification:
message = "No notification was shown."
else:
message = "{0} was not shown.".format(notification_class.__name__)
self.wait.until(
lambda _: isinstance(self.notification, notification_class),
message=message,
)
return self.notification
else:
self.wait.until(
lambda _: self.notification is None,
message="Unexpected notification shown.",
)
@property
def is_private(self):
"""Property that checks if the specified window is private or not.
Returns:
bool: True if this is a Private Browsing window.
"""
self.switch_to()
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.selenium.execute_script(
"""
Components.utils.import("resource://gre/modules/PrivateBrowsingUtils.jsm");
let chromeWindow = arguments[0].ownerDocument.defaultView;
return PrivateBrowsingUtils.isWindowPrivate(chromeWindow);
""",
self.document_element,
)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/base.py
|
BaseNotification.create
|
python
|
def create(window, root):
notifications = {}
_id = root.get_property("id")
from foxpuppet.windows.browser.notifications import addons
notifications.update(addons.NOTIFICATIONS)
return notifications.get(_id, BaseNotification)(window, root)
|
Create a notification object.
Args:
window (:py:class:`BrowserWindow`): Window object this region
appears in.
root
(:py:class:`~selenium.webdriver.remote.webelement.WebElement`):
WebDriver element object that serves as the root for the
notification.
Returns:
:py:class:`BaseNotification`: Firefox notification.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/base.py#L19-L39
| null |
class BaseNotification(Region):
"""Abstract base class for any kind of notification."""
__metaclass__ = ABCMeta
@staticmethod
@property
def label(self):
"""Provide access to the notification label.
Returns:
str: The notification label
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("label")
@property
def origin(self):
"""Provide access to the notification origin.
Returns:
str: The notification origin.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("origin")
def find_primary_button(self):
"""Retrieve the primary button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-primary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "button")
def find_secondary_button(self):
"""Retrieve the secondary button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-secondary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "secondarybutton")
def find_description(self):
"""Retrieve the notification description."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-description")
return self.root.find_anonymous_element_by_attribute(
"class", "popup-notification-description")
def find_close_button(self):
"""Retrieve the close button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-closebutton")
return self.root.find_anonymous_element_by_attribute(
"anonid", "closebutton")
def close(self):
"""Close the notification."""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_close_button().click()
self.window.wait_for_notification(None)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/base.py
|
BaseNotification.label
|
python
|
def label(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("label")
|
Provide access to the notification label.
Returns:
str: The notification label
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/base.py#L42-L50
| null |
class BaseNotification(Region):
"""Abstract base class for any kind of notification."""
__metaclass__ = ABCMeta
@staticmethod
def create(window, root):
"""Create a notification object.
Args:
window (:py:class:`BrowserWindow`): Window object this region
appears in.
root
(:py:class:`~selenium.webdriver.remote.webelement.WebElement`):
WebDriver element object that serves as the root for the
notification.
Returns:
:py:class:`BaseNotification`: Firefox notification.
"""
notifications = {}
_id = root.get_property("id")
from foxpuppet.windows.browser.notifications import addons
notifications.update(addons.NOTIFICATIONS)
return notifications.get(_id, BaseNotification)(window, root)
@property
@property
def origin(self):
"""Provide access to the notification origin.
Returns:
str: The notification origin.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("origin")
def find_primary_button(self):
"""Retrieve the primary button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-primary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "button")
def find_secondary_button(self):
"""Retrieve the secondary button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-secondary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "secondarybutton")
def find_description(self):
"""Retrieve the notification description."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-description")
return self.root.find_anonymous_element_by_attribute(
"class", "popup-notification-description")
def find_close_button(self):
"""Retrieve the close button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-closebutton")
return self.root.find_anonymous_element_by_attribute(
"anonid", "closebutton")
def close(self):
"""Close the notification."""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_close_button().click()
self.window.wait_for_notification(None)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/base.py
|
BaseNotification.origin
|
python
|
def origin(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("origin")
|
Provide access to the notification origin.
Returns:
str: The notification origin.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/base.py#L53-L61
| null |
class BaseNotification(Region):
"""Abstract base class for any kind of notification."""
__metaclass__ = ABCMeta
@staticmethod
def create(window, root):
"""Create a notification object.
Args:
window (:py:class:`BrowserWindow`): Window object this region
appears in.
root
(:py:class:`~selenium.webdriver.remote.webelement.WebElement`):
WebDriver element object that serves as the root for the
notification.
Returns:
:py:class:`BaseNotification`: Firefox notification.
"""
notifications = {}
_id = root.get_property("id")
from foxpuppet.windows.browser.notifications import addons
notifications.update(addons.NOTIFICATIONS)
return notifications.get(_id, BaseNotification)(window, root)
@property
def label(self):
"""Provide access to the notification label.
Returns:
str: The notification label
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("label")
@property
def find_primary_button(self):
"""Retrieve the primary button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-primary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "button")
def find_secondary_button(self):
"""Retrieve the secondary button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-secondary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "secondarybutton")
def find_description(self):
"""Retrieve the notification description."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-description")
return self.root.find_anonymous_element_by_attribute(
"class", "popup-notification-description")
def find_close_button(self):
"""Retrieve the close button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-closebutton")
return self.root.find_anonymous_element_by_attribute(
"anonid", "closebutton")
def close(self):
"""Close the notification."""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_close_button().click()
self.window.wait_for_notification(None)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/base.py
|
BaseNotification.find_primary_button
|
python
|
def find_primary_button(self):
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-primary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "button")
|
Retrieve the primary button.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/base.py#L63-L69
| null |
class BaseNotification(Region):
"""Abstract base class for any kind of notification."""
__metaclass__ = ABCMeta
@staticmethod
def create(window, root):
"""Create a notification object.
Args:
window (:py:class:`BrowserWindow`): Window object this region
appears in.
root
(:py:class:`~selenium.webdriver.remote.webelement.WebElement`):
WebDriver element object that serves as the root for the
notification.
Returns:
:py:class:`BaseNotification`: Firefox notification.
"""
notifications = {}
_id = root.get_property("id")
from foxpuppet.windows.browser.notifications import addons
notifications.update(addons.NOTIFICATIONS)
return notifications.get(_id, BaseNotification)(window, root)
@property
def label(self):
"""Provide access to the notification label.
Returns:
str: The notification label
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("label")
@property
def origin(self):
"""Provide access to the notification origin.
Returns:
str: The notification origin.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("origin")
def find_secondary_button(self):
"""Retrieve the secondary button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-secondary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "secondarybutton")
def find_description(self):
"""Retrieve the notification description."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-description")
return self.root.find_anonymous_element_by_attribute(
"class", "popup-notification-description")
def find_close_button(self):
"""Retrieve the close button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-closebutton")
return self.root.find_anonymous_element_by_attribute(
"anonid", "closebutton")
def close(self):
"""Close the notification."""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_close_button().click()
self.window.wait_for_notification(None)
|
mozilla/FoxPuppet
|
foxpuppet/windows/browser/notifications/base.py
|
BaseNotification.close
|
python
|
def close(self):
with self.selenium.context(self.selenium.CONTEXT_CHROME):
self.find_close_button().click()
self.window.wait_for_notification(None)
|
Close the notification.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/browser/notifications/base.py#L95-L99
|
[
"def find_close_button(self):\n \"\"\"Retrieve the close button.\"\"\"\n if self.window.firefox_version >= 67:\n return self.root.find_element(\n By.CLASS_NAME, \"popup-notification-closebutton\")\n return self.root.find_anonymous_element_by_attribute(\n \"anonid\", \"closebutton\")\n"
] |
class BaseNotification(Region):
"""Abstract base class for any kind of notification."""
__metaclass__ = ABCMeta
@staticmethod
def create(window, root):
"""Create a notification object.
Args:
window (:py:class:`BrowserWindow`): Window object this region
appears in.
root
(:py:class:`~selenium.webdriver.remote.webelement.WebElement`):
WebDriver element object that serves as the root for the
notification.
Returns:
:py:class:`BaseNotification`: Firefox notification.
"""
notifications = {}
_id = root.get_property("id")
from foxpuppet.windows.browser.notifications import addons
notifications.update(addons.NOTIFICATIONS)
return notifications.get(_id, BaseNotification)(window, root)
@property
def label(self):
"""Provide access to the notification label.
Returns:
str: The notification label
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("label")
@property
def origin(self):
"""Provide access to the notification origin.
Returns:
str: The notification origin.
"""
with self.selenium.context(self.selenium.CONTEXT_CHROME):
return self.root.get_attribute("origin")
def find_primary_button(self):
"""Retrieve the primary button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-primary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "button")
def find_secondary_button(self):
"""Retrieve the secondary button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-secondary-button")
return self.root.find_anonymous_element_by_attribute(
"anonid", "secondarybutton")
def find_description(self):
"""Retrieve the notification description."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-description")
return self.root.find_anonymous_element_by_attribute(
"class", "popup-notification-description")
def find_close_button(self):
"""Retrieve the close button."""
if self.window.firefox_version >= 67:
return self.root.find_element(
By.CLASS_NAME, "popup-notification-closebutton")
return self.root.find_anonymous_element_by_attribute(
"anonid", "closebutton")
|
mozilla/FoxPuppet
|
foxpuppet/windows/manager.py
|
WindowManager.windows
|
python
|
def windows(self):
from foxpuppet.windows import BrowserWindow
return [
BrowserWindow(self.selenium, handle)
for handle in self.selenium.window_handles
]
|
Return a list of all open windows.
Returns:
list: List of FoxPuppet BrowserWindow objects.
|
train
|
https://github.com/mozilla/FoxPuppet/blob/6575eb4c72fd024c986b254e198c8b4e6f68cddd/foxpuppet/windows/manager.py#L26-L38
| null |
class WindowManager(object):
"""A window manager that controls the creation of window objects.
Args:
selenium: (:py:class:`~selenium.webdriver.remote.webdriver.WebDriver`):
Firefox WebDriver object.
"""
def __init__(self, selenium):
"""Create WindowManager Object.
Args:
selenium:
(:py:class:`~selenium.webdriver.remote.webdriver.WebDriver`):
Firefox WebDriver object.
"""
self.selenium = selenium
@property
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
chebyshev
|
python
|
def chebyshev(point1, point2):
return max(abs(point1[0] - point2[0]), abs(point1[1] - point2[1]))
|
Computes distance between 2D points using chebyshev metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L26-L37
| null |
#!/usr/bin/python
# AUTHOR
# Daniel Pulido <dpmcmlxxvi@gmail.com>
# COPYRIGHT
# Copyright (c) 2015 Daniel Pulido <dpmcmlxxvi@gmail.com>
# LICENSE
# MIT License (http://opensource.org/licenses/MIT)
"""
Various patterns to scan pixels on a grid. Rectangular patterns are scanned
first along the x-coordinate then the y-coordinate. Radial patterns are
scanned clockwise. Transformation filters are available to apply
standard transformations (e.g., rotation, scale, translation) on the
coordinates.
"""
import math
import random
import sys
# ======================================================================
# Distance metrics
# ----------------------------------------------------------------------
def manhattan(point1, point2):
"""Computes distance between 2D points using manhattan metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return abs(point1[0] - point2[0]) + abs(point1[1] - point2[1])
def hilbertrot(n, x, y, rx, ry):
"""Rotates and flips a quadrant appropriately for the Hilbert scan
generator. See https://en.wikipedia.org/wiki/Hilbert_curve.
"""
if ry == 0:
if rx == 1:
x = n - 1 - x
y = n - 1 - y
return y, x
return x, y
# ======================================================================
# Scan transformations
# ----------------------------------------------------------------------
class clip(object):
    """Clip coordinates that exceed boundary

    Points outside the [minx, maxx] x [miny, maxy] box, or rejected by
    the optional predicate, are skipped; with ``abort=True`` the first
    such point terminates the iteration instead.
    """
    def __init__(self,
                 scan,
                 minx=-sys.maxsize,
                 maxx=sys.maxsize,
                 miny=-sys.maxsize,
                 maxy=sys.maxsize,
                 predicate=None,
                 abort=False):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param minx: Minimum x-coordinate (default = -sys.maxsize)
        :type minx: int
        :param maxx: Maximum x-coordinate (default = sys.maxsize)
        :type maxx: int
        :param miny: Minimum y-coordinate (default = -sys.maxsize)
        :type miny: int
        :param maxy: Maximum y-coordinate (default = sys.maxsize)
        :type maxy: int
        :param predicate: Optional function that takes 2 arguments (x and y)
                          and returns true if coordinate should be kept
                          otherwise false (default = None)
        :type predicate: function
        :param abort: Abort iteration if boundary is crossed
        :type abort: bool
        """
        # sys.maxint was removed in Python 3; sys.maxsize has the same
        # value on CPython 2, so the defaults stay backward-compatible.
        self.scan = scan
        self.minx = minx
        self.maxx = maxx
        self.miny = miny
        self.maxy = maxy
        self.predicate = predicate
        self.abort = abort
    def __iter__(self):
        return self
    def next(self):
        """Return the next in-bounds point.

        Skips filtered points; raises StopIteration if ``abort`` is set
        and a point falls outside the clip region.
        """
        while True:
            x, y = next(self.scan)
            if self.predicate is not None and not self.predicate(x, y):
                # Rejected by the user-supplied predicate
                if self.abort: raise StopIteration("Boundary crossed!")
            elif (x < self.minx or
                  x > self.maxx or
                  y < self.miny or
                  y > self.maxy):
                # Outside the rectangular clip region
                if self.abort: raise StopIteration("Boundary crossed!")
            else:
                return x, y
    # Python 3 uses __next__ for the iterator protocol; alias it so the
    # class iterates correctly on both Python 2 and 3.
    __next__ = next
class reflection(object):
    """Reflect coordinates about x and y axes
    """
    def __init__(self, scan, rx=False, ry=False):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param rx: True if x-coordinate should be reflected (default=False)
        :type rx: bool
        :param ry: True if y-coordinate should be reflected (default=False)
        :type ry: bool
        """
        self.scan = scan
        self.rx = rx
        self.ry = ry
    def __iter__(self):
        return self
    def next(self):
        """Return the next point with the selected coordinates negated.
        """
        point = next(self.scan)
        xout = -point[0] if self.rx else point[0]
        yout = -point[1] if self.ry else point[1]
        return xout, yout
class reservoir(object):
    def __init__(self, scan, npoints):
        """Randomly sample points using the reservoir sampling method. This is
        only useful if you need exactly 'npoints' sampled. Otherwise use the
        'sample' transformation to randomly sample at a given rate. This method
        requires storing 'npoints' in memory and precomputing the random
        selection so it may be slower than 'sample'.

        :param scan: Pixel scan generator
        :type scan: function
        :param npoints: Sample size
        :type npoints: int
        """
        if npoints <= 0: raise ValueError("Sample size must be positive")
        self.reservoir = []
        self.count = 0
        for i, pt in enumerate(scan):
            if i < npoints:
                # Fill phase: keep the first npoints points unconditionally
                self.reservoir.append(pt)
                continue
            # Replacement phase: point i survives with probability
            # npoints / (i + 1), keeping the sample uniform
            j = random.randint(0, i)
            if j < npoints:
                self.reservoir[j] = pt
        # Shuffle the reservoir in case population was small and the
        # points were not sufficiently randomized
        random.shuffle(self.reservoir)
    def __iter__(self):
        return self
    def next(self):
        """Return the next sampled point.

        :raises StopIteration: once the sample has been exhausted
        """
        if self.count >= len(self.reservoir):
            raise StopIteration("Reservoir exhausted")
        point = self.reservoir[self.count]
        self.count += 1
        return point
class rotation(object):
    """Rotate coordinates by given angle. If the final transformation axes do
    not align with the x and y axes then it may yield duplicate coordinates
    during scanning.
    """
    def __init__(self, scan, angle=0):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param angle: Counter-clockwise angle in degrees (default=0)
        :type angle: float
        """
        self.scan = scan
        # Stored in radians for the trig calls in next()
        self.angle = math.radians(angle)
    def __iter__(self):
        return self
    def next(self):
        """Return the next point rotated counter-clockwise about the origin.
        """
        x, y = next(self.scan)
        cos_a = math.cos(self.angle)
        sin_a = math.sin(self.angle)
        return cos_a * x - sin_a * y, sin_a * x + cos_a * y
class sample(object):
    """Randomly sample points at the given probability.
    """
    def __init__(self, scan, probability=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param probability: Sampling probability in interval [0,1] (default=1)
        :type probability: float
        :raises ValueError: if probability is outside [0, 1]
        """
        if probability < 0 or probability > 1:
            raise ValueError("Sampling probability must be in range [0,1]")
        self.scan = scan
        self.probability = probability
    def __iter__(self):
        return self
    def next(self):
        """Return the next point kept by the random sampling.
        """
        point = next(self.scan)
        if self.probability == 1:
            # Fast path: keep every point, no random draw needed
            return point
        # One uniform draw per candidate; keep when draw <= probability
        while random.random() > self.probability:
            point = next(self.scan)
        return point
class scale(object):
    """Scale coordinates by given factor
    """
    def __init__(self, scan, sx=1, sy=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param sx: x-coordinate scale factor (default=1)
        :type sx: float
        :param sy: y-coordinate scale factor (default=1)
        :type sy: float
        :raises ValueError: if either factor is not positive
        """
        if sx <= 0: raise ValueError("X-scale must be positive")
        if sy <= 0: raise ValueError("Y-scale must be positive")
        self.scan = scan
        self.sx = sx
        self.sy = sy
    def __iter__(self):
        return self
    def next(self):
        """Return the next point with each coordinate multiplied by its factor.
        """
        point = next(self.scan)
        return self.sx * point[0], self.sy * point[1]
class skip(object):
    """Skip points at the given step size
    """
    def __init__(self, scan, start=0, stop=sys.maxsize, step=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param start: Iteration starting 0-based index (default = 0)
        :type start: int
        :param stop: Iteration stopping 0-based index (default = sys.maxsize)
        :type stop: int
        :param step: Iteration step size (default = 1)
        :type step: int
        :raises ValueError: if start/stop are negative, stop < start, or
                            step is not positive
        """
        if start < 0: raise ValueError("Start must be non-negative")
        if stop < 0: raise ValueError("Stop must be non-negative")
        if stop < start: raise ValueError("Stop must be greater than start")
        if step <= 0: raise ValueError("Step must be positive")
        # sys.maxint was removed in Python 3; sys.maxsize has the same
        # value on CPython 2, so the default stays backward-compatible.
        self.scan = scan
        self.start = start
        self.stop = stop
        self.step = step
        # Index of the most recently consumed point (-1 = none yet)
        self.index = -1
    def __iter__(self):
        return self
    def next(self):
        """Return the next point whose index falls on the start/step grid.

        :raises StopIteration: once the stop index is passed
        """
        while True:
            x, y = next(self.scan)
            self.index += 1
            if (self.index < self.start): continue
            if (self.index > self.stop): raise StopIteration("skip stopping")
            if ((self.index - self.start) % self.step != 0): continue
            return x, y
    # Python 3 uses __next__ for the iterator protocol; alias it so the
    # class iterates correctly on both Python 2 and 3.
    __next__ = next
class snap(object):
    """Snap x and y coordinates to a grid point
    """
    def __init__(self, scan):
        """
        :param scan: Pixel scan generator
        :type scan: function
        """
        self.scan = scan
    def __iter__(self):
        return self
    def next(self):
        """Return the next point rounded to the nearest integer grid point.
        """
        point = next(self.scan)
        return int(round(point[0])), int(round(point[1]))
class swap(object):
    """Swap x and y coordinates
    """
    def __init__(self, scan):
        """
        :param scan: Pixel scan generator
        :type scan: function
        """
        self.scan = scan
    def __iter__(self):
        return self
    def next(self):
        """Return the next point with its coordinates exchanged.
        """
        point = next(self.scan)
        return point[1], point[0]
class translation(object):
    """Translate coordinates by given offset
    """
    def __init__(self, scan, tx=0, ty=0):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param tx: x-coordinate translation offset (default = 0)
        :type tx: float
        :param ty: y-coordinate translation offset (default = 0)
        :type ty: float
        """
        self.scan = scan
        self.tx = tx
        self.ty = ty
    def __iter__(self):
        return self
    def next(self):
        """Return the next point shifted by (tx, ty).
        """
        point = next(self.scan)
        return point[0] + self.tx, point[1] + self.ty
# ======================================================================
# Scan patterns
# ----------------------------------------------------------------------
def circlescan(x0, y0, r1, r2):
    """Scan pixels in a circle pattern around a center point

    Yields points on circles of radius r1 through r2 (stepping outward or
    inward depending on ordering), tracing each circle octant by octant.
    Grid points already emitted on the previous radius are skipped to
    avoid duplicates between neighboring circles.

    :param x0: Center x-coordinate
    :type x0: float
    :param y0: Center y-coordinate
    :type y0: float
    :param r1: Initial radius
    :type r1: float
    :param r2: Final radius
    :type r2: float
    :returns: Coordinate generator
    :rtype: function
    :raises ValueError: if either radius is negative

    NOTE(review): r1 and r2 are passed to range() below, so in practice
    they must be integers despite the float types documented above --
    confirm the intended contract.
    """
    # Validate inputs
    if r1 < 0: raise ValueError("Initial radius must be non-negative")
    if r2 < 0: raise ValueError("Final radius must be non-negative")
    # List of pixels visited in previous diameter
    previous = []
    # Scan distances outward (1) or inward (-1)
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        if distance == 0:
            # Degenerate circle: only the center point
            yield x0, y0
        else:
            # Computes points for first octant and the rotate by multiples of
            # 45 degrees to compute the other octants
            a = 0.707107  # ~= sqrt(2)/2 = cos(45 deg) = sin(45 deg)
            rotations = {0: [[ 1, 0], [ 0, 1]],
                         1: [[ a, a], [-a, a]],
                         2: [[ 0, 1], [-1, 0]],
                         3: [[-a, a], [-a,-a]],
                         4: [[-1, 0], [ 0,-1]],
                         5: [[-a,-a], [ a,-a]],
                         6: [[ 0,-1], [ 1, 0]],
                         7: [[ a,-a], [ a, a]]}
            nangles = len(rotations)
            # List of pixels visited in current diameter
            current = []
            for angle in range(nangles):
                # Walk one octant (x from 0 until it meets y) and rotate
                # each point into the current octant via the 2x2 matrix.
                x = 0
                y = distance
                d = 1 - distance  # midpoint-circle-style decision variable
                while x < y:
                    xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y
                    yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y
                    xr = x0 + xr
                    yr = y0 + yr
                    # First check if point was in previous diameter
                    # since our scan pattern can lead to duplicates in
                    # neighboring diameters
                    point = (int(round(xr)), int(round(yr)))
                    if point not in previous:
                        yield xr, yr
                        current.append(point)
                    # Move pixel according to circle constraint
                    if (d < 0):
                        d += 3 + 2 * x
                    else:
                        d += 5 - 2 * (y-x)
                        y -= 1
                    x += 1
            # Remember this circle's grid points for deduplication on the
            # next radius
            previous = current
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
    """Scan pixels in a grid pattern along the x-coordinate then y-coordinate

    Rows advance from yi toward yf; within each row, columns run from xi
    toward xf.  Direction is inferred from the endpoint ordering.

    :param xi: Initial x-coordinate
    :type xi: int
    :param yi: Initial y-coordinate
    :type yi: int
    :param xf: Final x-coordinate
    :type xf: int
    :param yf: Final y-coordinate
    :type yf: int
    :param stepx: Step size in x-coordinate
    :type stepx: int
    :param stepy: Step size in y-coordinate
    :type stepy: int
    :returns: Coordinate generator
    :rtype: function
    :raises ValueError: if either step size is not positive
    """
    if stepx <= 0: raise ValueError("X-step must be positive")
    if stepy <= 0: raise ValueError("Y-step must be positive")
    # Signed steps point from the initial toward the final coordinate
    dx = stepx if xf >= xi else -stepx
    dy = stepy if yf >= yi else -stepy
    for row in range(yi, yf + dy, dy):
        for col in range(xi, xf + dx, dx):
            yield col, row
def hilbertscan(size, distance):
    """Scan pixels in a Hilbert curve pattern in the first quadrant. Modified
    algorithm from https://en.wikipedia.org/wiki/Hilbert_curve.

    :param size: Size of enclosing square (rounded up internally to a
                 power of two, then doubled)
    :type size: int
    :param distance: Distance along curve (Must be smaller than size**2 - 1)
    :type distance: int
    :returns: Coordinate generator
    :rtype: function
    :raises ValueError: if distance exceeds the curve length (raised on
                        first iteration, since this is a generator)
    """
    # The Hilbert construction below subdivides power-of-two squares
    size = 2 * (1 << (size - 1).bit_length())
    # PEP 479: raising StopIteration inside a generator becomes a
    # RuntimeError on Python 3.7+, so signal bad input with ValueError.
    if distance > size ** 2 - 1: raise ValueError("Invalid distance!")
    for d in range(distance):
        # Convert scalar curve position d to (x, y) by undoing the
        # Hilbert subdivision one level at a time (standard d2xy).
        t = d
        x = 0
        y = 0
        s = 1
        while s < size:
            # Floor division keeps t integral on both Python 2 and 3
            # (the original used '/', which yields floats on Python 3
            # and breaks the bitwise '&' below).
            rx = 1 & (t // 2)
            ry = 1 & (t ^ rx)
            x, y = hilbertrot(s, x, y, rx, ry)
            x += s * rx
            y += s * ry
            t //= 4
            s *= 2
        yield x, y
def ringscan(x0, y0, r1, r2, metric=chebyshev):
    """Scan pixels in a ring pattern around a center point clockwise

    Walks the grid points at each distance r1..r2 from the center, where
    "distance" is measured by the supplied metric (chebyshev by default,
    defined earlier in this module).

    :param x0: Center x-coordinate
    :type x0: int
    :param y0: Center y-coordinate
    :type y0: int
    :param r1: Initial radius
    :type r1: int
    :param r2: Final radius
    :type r2: int
    :param metric: Distance metric
    :type metric: function
    :returns: Coordinate generator
    :rtype: function
    :raises ValueError: if either radius is negative
    :raises TypeError: if metric is not callable
    """
    # Validate inputs
    if r1 < 0: raise ValueError("Initial radius must be non-negative")
    if r2 < 0: raise ValueError("Final radius must be non-negative")
    if not hasattr(metric, "__call__"): raise TypeError("Metric not callable")
    # Define clockwise step directions
    # NOTE(review): 'direction' lives outside the radius loop, so each ring
    # resumes stepping with the heading that last succeeded -- confirm this
    # carry-over is intended rather than resetting per ring.
    direction = 0
    steps = {0: [ 1, 0],
             1: [ 1,-1],
             2: [ 0,-1],
             3: [-1,-1],
             4: [-1, 0],
             5: [-1, 1],
             6: [ 0, 1],
             7: [ 1, 1]}
    nsteps = len(steps)
    center = [x0, y0]
    # Scan distances outward (1) or inward (-1)
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        # Start each ring at the point directly above the center
        initial = [x0, y0 + distance]
        current = initial
        # Number of tries to find a valid neighbor
        ntrys = 0
        while True:
            # Short-circuit special case
            if distance == 0:
                yield current[0], current[1]
                break
            # Try and take a step and check if still within distance
            nextpoint = [current[i] + steps[direction][i] for i in range(2)]
            if metric(center, nextpoint) != distance:
                # Check if we tried all step directions and failed
                ntrys += 1
                if ntrys == nsteps:
                    break
                # Try the next direction
                direction = (direction + 1) % nsteps
                continue
            ntrys = 0
            yield current[0], current[1]
            # Check if we have come all the way around
            current = nextpoint
            if current == initial:
                break
        # If every direction failed, the ring is stuck: abort the whole scan
        if ntrys == nsteps:
            break
def snakescan(xi, yi, xf, yf):
    """Scan pixels in a snake pattern along the x-coordinate then y-coordinate

    Alternate rows are traversed in opposite x-directions so that the
    scan path is continuous (boustrophedon order).

    :param xi: Initial x-coordinate
    :type xi: int
    :param yi: Initial y-coordinate
    :type yi: int
    :param xf: Final x-coordinate
    :type xf: int
    :param yf: Final y-coordinate
    :type yf: int
    :returns: Coordinate generator
    :rtype: function
    """
    # Directions from the initial toward the final coordinates
    dx = 1 if xf >= xi else -1
    dy = 1 if yf >= yi else -1
    forward = True
    for y in range(yi, yf + dy, dy):
        if forward:
            cols = range(xi, xf + dx, dx)
        else:
            # Reversed traversal of the same inclusive column span
            cols = range(xf, xi - dx, -dx)
        for x in cols:
            yield x, y
        # Flip the x-direction for the next row
        forward = not forward
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
    """Scan pixels in a random walk pattern with given step probabilities. The
    random walk will continue indefinitely unless a skip transformation is used
    with the 'stop' parameter set or a clip transformation is used with the
    'abort' parameter set to True. The probabilities are normalized to sum to 1.

    :param x0: Initial x-coordinate
    :type x0: int
    :param y0: Initial y-coordinate
    :type y0: int
    :param xn: Probability of moving in the negative x direction
    :type xn: float
    :param xp: Probability of moving in the positive x direction
    :type xp: float
    :param yn: Probability of moving in the negative y direction
    :type yn: float
    :param yp: Probability of moving in the positive y direction
    :type yp: float
    :returns: Coordinate generator
    :rtype: function
    :raises ValueError: if any probability is negative, or if all four are
                        zero (raised on first iteration, since this is a
                        generator)
    """
    # Validate inputs
    if xn < 0: raise ValueError("Negative x probabilty must be non-negative")
    if xp < 0: raise ValueError("Positive x probabilty must be non-negative")
    if yn < 0: raise ValueError("Negative y probabilty must be non-negative")
    if yp < 0: raise ValueError("Positive y probabilty must be non-negative")
    # Compute normalized probability
    total = xp + xn + yp + yn
    # Guard against a ZeroDivisionError when every probability is zero
    if total == 0: raise ValueError("At least one probability must be positive")
    xn /= total
    xp /= total
    yn /= total
    yp /= total
    # Cumulative thresholds partition [0, 1) into the four step choices
    cxn = xn
    cxp = cxn + xp
    cyn = cxp + yn
    # Initialize position
    x, y = x0, y0
    while True:
        yield x, y
        # Take random step
        probability = random.random()
        if probability <= cxn:
            x -= 1
        elif probability <= cxp:
            x += 1
        elif probability <= cyn:
            y -= 1
        else:
            y += 1
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
hilbertrot
|
python
|
def hilbertrot(n, x, y, rx, ry):
if ry == 0:
if rx == 1:
x = n - 1 - x
y = n - 1 - y
return y, x
return x, y
|
Rotates and flips a quadrant appropriately for the Hilbert scan
generator. See https://en.wikipedia.org/wiki/Hilbert_curve.
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L52-L61
| null |
#!/usr/bin/python
# AUTHOR
# Daniel Pulido <dpmcmlxxvi@gmail.com>
# COPYRIGHT
# Copyright (c) 2015 Daniel Pulido <dpmcmlxxvi@gmail.com>
# LICENSE
# MIT License (http://opensource.org/licenses/MIT)
"""
Various patterns to scan pixels on a grid. Rectangular patterns are scanned
first along the x-coordinate then the y-coordinate. Radial patterns are
scanned clockwise. Transformation filters are available to apply
standard transformations (e.g., rotation, scale, translation) on the
coordinates.
"""
import math
import random
import sys
# ======================================================================
# Distance metrics
# ----------------------------------------------------------------------
def chebyshev(point1, point2):
    """Computes distance between 2D points using chebyshev metric

    The chebyshev (chessboard) distance is the larger of the absolute
    coordinate differences.

    :param point1: 1st point
    :type point1: list
    :param point2: 2nd point
    :type point2: list
    :returns: Distance between point1 and point2
    :rtype: float
    """
    dx = abs(point1[0] - point2[0])
    dy = abs(point1[1] - point2[1])
    return dx if dx > dy else dy
def manhattan(point1, point2):
"""Computes distance between 2D points using manhattan metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return abs(point1[0] - point2[0]) + abs(point1[1] - point2[1])
# ======================================================================
# Scan transformations
# ----------------------------------------------------------------------
class clip(object):
"""Clip coordinates that exceed boundary
"""
def __init__(self,
scan,
minx=-sys.maxint,
maxx=sys.maxint,
miny=-sys.maxint,
maxy=sys.maxint,
predicate=None,
abort=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param minx: Minimum x-coordinate (default = -sys.maxint)
:type minx: int
:param maxx: Maximum x-coordinate (default = sys.maxint)
:type maxx: int
:param miny: Minimum y-coordinate (default = -sys.maxint)
:type miny: int
:param maxy: Maximum y-coordinate (default = sys.maxint)
:type maxy: int
:param predicate: Optional function that takes 2 arguments (x and y)
and returns true if coordinate should be kept
otherwise false (default = None)
:type predicate: function
:param abort: Abort iteration if boundary is crossed
:type abort: bool
"""
self.scan = scan
self.minx = minx
self.maxx = maxx
self.miny = miny
self.maxy = maxy
self.predicate = predicate
self.abort = abort
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
if self.predicate is not None and not self.predicate(x,y):
if self.abort: raise StopIteration("Boundary crossed!")
elif (x < self.minx or
x > self.maxx or
y < self.miny or
y > self.maxy):
if self.abort: raise StopIteration("Boundary crossed!")
else:
return x, y
class reflection(object):
"""Reflect coordinates about x and y axes
"""
def __init__(self, scan, rx=False, ry=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param rx: True if x-coordinate should be reflected (default=False)
:type rx: bool
:param ry: True if y-coordinate should be reflected (default=False)
:type ry: bool
"""
self.scan = scan
self.rx = rx
self.ry = ry
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = -x if self.rx else x
yr = -y if self.ry else y
return xr, yr
class reservoir(object):
def __init__(self, scan, npoints):
"""Randomly sample points using the reservoir sampling method. This is
only useful if you need exactly 'npoints' sampled. Otherwise use the
'sample' transformation to randomly sample at a given rate. This method
requires storing 'npoints' in memory and precomputing the random
selection so it may be slower than 'sample'.
:param scan: Pixel scan generator
:type scan: function
:param npoints: Sample size
:type npoints: int
"""
# Validate inputs
if npoints <= 0: raise ValueError("Sample size must be positive")
self.reservoir = []
self.count = 0
# Populate reservoir
for index, point in enumerate(scan):
if index < npoints:
self.reservoir.append(point)
else:
j = random.randint(0, index)
if j < npoints:
self.reservoir[j] = point
# Shuffle the reservoir in case population was small and the
# points were not sufficiently randomized
random.shuffle(self.reservoir)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.count < len(self.reservoir):
self.count += 1
return self.reservoir[self.count-1]
raise StopIteration("Reservoir exhausted")
class rotation(object):
"""Rotate coordinates by given angle. If the final transformation axes do
not align with the x and y axes then it may yield duplicate coordinates
during scanning.
"""
def __init__(self, scan, angle=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param angle: Counter-clockwise angle in degrees (default=0)
:type angle: float
"""
self.scan = scan
self.angle = angle * (math.pi / 180.0)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
ca, sa = math.cos(self.angle), math.sin(self.angle)
xr = ca * x - sa * y
yr = sa * x + ca * y
return xr, yr
class sample(object):
"""Randomly sample points at the given probability.
"""
def __init__(self, scan, probability=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param probability: Sampling probability in interval [0,1] (default=1)
:type probability: float
"""
if probability < 0 or probability > 1:
raise ValueError("Sampling probability must be in range [0,1]")
self.scan = scan
self.probability = probability
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.probability == 1:
x, y = next(self.scan)
else:
while True:
x, y = next(self.scan)
if random.random() <= self.probability: break
return x, y
class scale(object):
"""Scale coordinates by given factor
"""
def __init__(self, scan, sx=1, sy=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate scale factor (default=1)
:type sx: float
:param sy: y-coordinate scale factor (default=1)
:type sy: float
"""
if sx <= 0: raise ValueError("X-scale must be positive")
if sy <= 0: raise ValueError("Y-scale must be positive")
self.scan = scan
self.sx = sx
self.sy = sy
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = self.sx * x
yr = self.sy * y
return xr, yr
class skip(object):
"""Skip points at the given step size
"""
def __init__(self, scan, start=0, stop=sys.maxint, step=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param start: Iteration starting 0-based index (default = 0)
:type start: int
:param stop: Iteration stopping 0-based index (default = sys.maxint)
:type stop: int
:param step: Iteration step size (default = 1)
:type step: int
"""
if start < 0: raise ValueError("Start must be non-negative")
if stop < 0: raise ValueError("Stop must be non-negative")
if stop < start: raise ValueError("Stop must be greater than start")
if step <= 0: raise ValueError("Step must be positive")
self.scan = scan
self.start = start
self.stop = stop
self.step = step
self.index = -1
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
self.index += 1
if (self.index < self.start): continue
if (self.index > self.stop): raise StopIteration("skip stopping")
if ((self.index-self.start) % self.step != 0): continue
return x, y
class snap(object):
"""Snap x and y coordinates to a grid point
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xs = int(round(x))
ys = int(round(y))
return xs, ys
class swap(object):
"""Swap x and y coordinates
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
return y, x
class translation(object):
"""Translate coordinates by given offset
"""
def __init__(self, scan, tx=0, ty=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate translation offset (default = 0)
:type sx: float
:param sy: y-coordinate translaation offset (default = 0)
:type sy: float
"""
self.scan = scan
self.tx = tx
self.ty = ty
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = x + self.tx
yr = y + self.ty
return xr, yr
# ======================================================================
# Scan patterns
# ----------------------------------------------------------------------
def circlescan(x0, y0, r1, r2):
"""Scan pixels in a circle pattern around a center point
:param x0: Center x-coordinate
:type x0: float
:param y0: Center y-coordinate
:type y0: float
:param r1: Initial radius
:type r1: float
:param r2: Final radius
:type r2: float
:returns: Coordinate generator
:rtype: function
"""
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
# List of pixels visited in previous diameter
previous = []
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
if distance == 0:
yield x0, y0
else:
# Computes points for first octant and the rotate by multiples of
# 45 degrees to compute the other octants
a = 0.707107
rotations = {0: [[ 1, 0], [ 0, 1]],
1: [[ a, a], [-a, a]],
2: [[ 0, 1], [-1, 0]],
3: [[-a, a], [-a,-a]],
4: [[-1, 0], [ 0,-1]],
5: [[-a,-a], [ a,-a]],
6: [[ 0,-1], [ 1, 0]],
7: [[ a,-a], [ a, a]]}
nangles = len(rotations)
# List of pixels visited in current diameter
current = []
for angle in range(nangles):
x = 0
y = distance
d = 1 - distance
while x < y:
xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y
yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y
xr = x0 + xr
yr = y0 + yr
# First check if point was in previous diameter
# since our scan pattern can lead to duplicates in
# neighboring diameters
point = (int(round(xr)), int(round(yr)))
if point not in previous:
yield xr, yr
current.append(point)
# Move pixel according to circle constraint
if (d < 0):
d += 3 + 2 * x
else:
d += 5 - 2 * (y-x)
y -= 1
x += 1
previous = current
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
"""Scan pixels in a grid pattern along the x-coordinate then y-coordinate
:param xi: Initial x-coordinate
:type xi: int
:param yi: Initial y-coordinate
:type yi: int
:param xf: Final x-coordinate
:type xf: int
:param yf: Final y-coordinate
:type yf: int
:param stepx: Step size in x-coordinate
:type stepx: int
:param stepy: Step size in y-coordinate
:type stepy: int
:returns: Coordinate generator
:rtype: function
"""
if stepx <= 0: raise ValueError("X-step must be positive")
if stepy <= 0: raise ValueError("Y-step must be positive")
# Determine direction to move
dx = stepx if xf >= xi else -stepx
dy = stepy if yf >= yi else -stepy
for y in range(yi, yf + dy, dy):
for x in range(xi, xf + dx, dx):
yield x, y
def hilbertscan(size, distance):
"""Scan pixels in a Hilbert curve pattern in the first quadrant. Modified
algorithm from https://en.wikipedia.org/wiki/Hilbert_curve.
:param size: Size of enclosing square
:type size: int
:param distance: Distance along curve (Must be smaller than size**2 - 1)
:type distance: int
:returns: Coordinate generator
:rtype: function
"""
size = 2*(1<<(size-1).bit_length());
if (distance > size**2 - 1): raise StopIteration("Invalid distance!")
for d in range(distance):
t = d
x = 0
y = 0
s = 1
while (s < size):
rx = 1 & (t / 2)
ry = 1 & (t ^ rx)
x, y = hilbertrot(s, x, y, rx, ry)
x += s * rx
y += s * ry
t /= 4
s *= 2
yield x, y
def ringscan(x0, y0, r1, r2, metric=chebyshev):
"""Scan pixels in a ring pattern around a center point clockwise
:param x0: Center x-coordinate
:type x0: int
:param y0: Center y-coordinate
:type y0: int
:param r1: Initial radius
:type r1: int
:param r2: Final radius
:type r2: int
:param metric: Distance metric
:type metric: function
:returns: Coordinate generator
:rtype: function
"""
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
if not hasattr(metric, "__call__"): raise TypeError("Metric not callable")
# Define clockwise step directions
direction = 0
steps = {0: [ 1, 0],
1: [ 1,-1],
2: [ 0,-1],
3: [-1,-1],
4: [-1, 0],
5: [-1, 1],
6: [ 0, 1],
7: [ 1, 1]}
nsteps = len(steps)
center = [x0, y0]
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
initial = [x0, y0 + distance]
current = initial
# Number of tries to find a valid neighrbor
ntrys = 0
while True:
# Short-circuit special case
if distance == 0:
yield current[0], current[1]
break
# Try and take a step and check if still within distance
nextpoint = [current[i] + steps[direction][i] for i in range(2)]
if metric(center, nextpoint) != distance:
# Check if we tried all step directions and failed
ntrys += 1
if ntrys == nsteps:
break
# Try the next direction
direction = (direction + 1) % nsteps
continue
ntrys = 0
yield current[0], current[1]
# Check if we have come all the way around
current = nextpoint
if current == initial:
break
# Check if we tried all step directions and failed
if ntrys == nsteps:
break
def snakescan(xi, yi, xf, yf):
"""Scan pixels in a snake pattern along the x-coordinate then y-coordinate
:param xi: Initial x-coordinate
:type xi: int
:param yi: Initial y-coordinate
:type yi: int
:param xf: Final x-coordinate
:type xf: int
:param yf: Final y-coordinate
:type yf: int
:returns: Coordinate generator
:rtype: function
"""
# Determine direction to move
dx = 1 if xf >= xi else -1
dy = 1 if yf >= yi else -1
# Scan pixels first along x-coordinate then y-coordinate and flip
# x-direction when the end of the line is reached
x, xa, xb = xi, xi, xf
for y in range(yi, yf + dy, dy):
for x in range(xa, xb + dx, dx):
yield x, y
# Swap x-direction
if x == xa or x == xb:
dx *= -1
xa, xb = xb, xa
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
"""Scan pixels in a random walk pattern with given step probabilities. The
random walk will continue indefinitely unless a skip transformation is used
with the 'stop' parameter set or a clip transformation is used with the
'abort' parameter set to True. The probabilities are normalized to sum to 1.
:param x0: Initial x-coordinate
:type x0: int
:param y0: Initial y-coordinate
:type y0: int
:param xn: Probability of moving in the negative x direction
:type xn: float
:param xp: Probability of moving in the positive x direction
:type xp: float
:param yn: Probability of moving in the negative y direction
:type yn: float
:param yp: Probability of moving in the positive y direction
:type yp: float
"""
# Validate inputs
if xn < 0: raise ValueError("Negative x probabilty must be non-negative")
if xp < 0: raise ValueError("Positive x probabilty must be non-negative")
if yn < 0: raise ValueError("Negative y probabilty must be non-negative")
if yp < 0: raise ValueError("Positive y probabilty must be non-negative")
# Compute normalized probability
total = xp + xn + yp + yn
xn /= total
xp /= total
yn /= total
yp /= total
# Compute cumulative probability
cxn = xn
cxp = cxn + xp
cyn = cxp + yn
# Initialize position
x, y = x0, y0
while True:
yield x, y
# Take random step
probability = random.random()
if probability <= cxn:
x -= 1
elif probability <= cxp:
x += 1
elif probability <= cyn:
y -= 1
else:
y += 1
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
circlescan
|
python
|
def circlescan(x0, y0, r1, r2):
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
# List of pixels visited in previous diameter
previous = []
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
if distance == 0:
yield x0, y0
else:
# Computes points for first octant and the rotate by multiples of
# 45 degrees to compute the other octants
a = 0.707107
rotations = {0: [[ 1, 0], [ 0, 1]],
1: [[ a, a], [-a, a]],
2: [[ 0, 1], [-1, 0]],
3: [[-a, a], [-a,-a]],
4: [[-1, 0], [ 0,-1]],
5: [[-a,-a], [ a,-a]],
6: [[ 0,-1], [ 1, 0]],
7: [[ a,-a], [ a, a]]}
nangles = len(rotations)
# List of pixels visited in current diameter
current = []
for angle in range(nangles):
x = 0
y = distance
d = 1 - distance
while x < y:
xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y
yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y
xr = x0 + xr
yr = y0 + yr
# First check if point was in previous diameter
# since our scan pattern can lead to duplicates in
# neighboring diameters
point = (int(round(xr)), int(round(yr)))
if point not in previous:
yield xr, yr
current.append(point)
# Move pixel according to circle constraint
if (d < 0):
d += 3 + 2 * x
else:
d += 5 - 2 * (y-x)
y -= 1
x += 1
previous = current
|
Scan pixels in a circle pattern around a center point
:param x0: Center x-coordinate
:type x0: float
:param y0: Center y-coordinate
:type y0: float
:param r1: Initial radius
:type r1: float
:param r2: Final radius
:type r2: float
:returns: Coordinate generator
:rtype: function
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L392-L466
| null |
#!/usr/bin/python
# AUTHOR
# Daniel Pulido <dpmcmlxxvi@gmail.com>
# COPYRIGHT
# Copyright (c) 2015 Daniel Pulido <dpmcmlxxvi@gmail.com>
# LICENSE
# MIT License (http://opensource.org/licenses/MIT)
"""
Various patterns to scan pixels on a grid. Rectangular patterns are scanned
first along the x-coordinate then the y-coordinate. Radial patterns are
scanned clockwise. Transformation filters are available to apply
standard transformations (e.g., rotation, scale, translation) on the
coordinates.
"""
import math
import random
import sys
# ======================================================================
# Distance metrics
# ----------------------------------------------------------------------
def chebyshev(point1, point2):
"""Computes distance between 2D points using chebyshev metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return max(abs(point1[0] - point2[0]), abs(point1[1] - point2[1]))
def manhattan(point1, point2):
    """Manhattan (L1, taxicab) distance between two 2D points.
    :param point1: 1st point
    :type point1: list
    :param point2: 2nd point
    :type point2: list
    :returns: Distance between point1 and point2
    :rtype: float
    """
    dx = abs(point1[0] - point2[0])
    dy = abs(point1[1] - point2[1])
    return dx + dy
def hilbertrot(n, x, y, rx, ry):
    """Rotates and flips a quadrant appropriately for the Hilbert scan
    generator. See https://en.wikipedia.org/wiki/Hilbert_curve.
    """
    if ry != 0:
        # No transform needed for this quadrant
        return x, y
    if rx == 1:
        # Flip both coordinates within the n-by-n quadrant
        x, y = n - 1 - x, n - 1 - y
    # Transpose (swap the axes)
    return y, x
# ======================================================================
# Scan transformations
# ----------------------------------------------------------------------
class clip(object):
    """Clip coordinates that exceed a rectangular boundary or fail a
    user-supplied predicate. Out-of-range points are silently dropped
    unless ``abort`` is set, in which case iteration stops at the first
    offending point.
    """
    # sys.maxint was removed in Python 3; fall back to sys.maxsize so the
    # default bounds work on both interpreters.
    _BIG = getattr(sys, 'maxint', sys.maxsize)
    def __init__(self,
                 scan,
                 minx=-_BIG,
                 maxx=_BIG,
                 miny=-_BIG,
                 maxy=_BIG,
                 predicate=None,
                 abort=False):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param minx: Minimum x-coordinate (default = -sys.maxsize)
        :type minx: int
        :param maxx: Maximum x-coordinate (default = sys.maxsize)
        :type maxx: int
        :param miny: Minimum y-coordinate (default = -sys.maxsize)
        :type miny: int
        :param maxy: Maximum y-coordinate (default = sys.maxsize)
        :type maxy: int
        :param predicate: Optional function that takes 2 arguments (x and y)
                          and returns true if coordinate should be kept
                          otherwise false (default = None)
        :type predicate: function
        :param abort: Abort iteration if boundary is crossed
        :type abort: bool
        """
        self.scan = scan
        self.minx = minx
        self.maxx = maxx
        self.miny = miny
        self.maxy = maxy
        self.predicate = predicate
        self.abort = abort
    def __iter__(self):
        return self
    def next(self):
        """Return the next in-bounds point, skipping clipped ones.
        """
        while True:
            x, y = next(self.scan)
            # A predicate, when given, takes precedence over the box test
            if self.predicate is not None and not self.predicate(x, y):
                if self.abort: raise StopIteration("Boundary crossed!")
            elif (x < self.minx or
                  x > self.maxx or
                  y < self.miny or
                  y > self.maxy):
                if self.abort: raise StopIteration("Boundary crossed!")
            else:
                return x, y
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
class reflection(object):
    """Reflect coordinates about x and y axes
    """
    def __init__(self, scan, rx=False, ry=False):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param rx: True if x-coordinate should be reflected (default=False)
        :type rx: bool
        :param ry: True if y-coordinate should be reflected (default=False)
        :type ry: bool
        """
        self.scan = scan
        self.rx = rx
        self.ry = ry
    def __iter__(self):
        return self
    def next(self):
        """Return the next point with the requested axes negated.
        """
        x, y = next(self.scan)
        return (-x if self.rx else x, -y if self.ry else y)
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
class reservoir(object):
    """Randomly sample a fixed number of points via reservoir sampling.
    """
    def __init__(self, scan, npoints):
        """Randomly sample points using the reservoir sampling method. This is
        only useful if you need exactly 'npoints' sampled. Otherwise use the
        'sample' transformation to randomly sample at a given rate. This method
        requires storing 'npoints' in memory and precomputing the random
        selection so it may be slower than 'sample'.
        :param scan: Pixel scan generator
        :type scan: function
        :param npoints: Sample size
        :type npoints: int
        """
        # Validate inputs
        if npoints <= 0: raise ValueError("Sample size must be positive")
        self.reservoir = []
        self.count = 0
        # Populate reservoir: keep the first npoints, then replace entries
        # with decreasing probability so every input point is kept uniformly
        for index, point in enumerate(scan):
            if index < npoints:
                self.reservoir.append(point)
            else:
                j = random.randint(0, index)
                if j < npoints:
                    self.reservoir[j] = point
        # Shuffle the reservoir in case population was small and the
        # points were not sufficiently randomized
        random.shuffle(self.reservoir)
    def __iter__(self):
        return self
    def next(self):
        """Return the next sampled point.
        """
        if self.count < len(self.reservoir):
            self.count += 1
            return self.reservoir[self.count-1]
        raise StopIteration("Reservoir exhausted")
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
class rotation(object):
    """Rotate coordinates by given angle. If the final transformation axes do
    not align with the x and y axes then it may yield duplicate coordinates
    during scanning.
    """
    def __init__(self, scan, angle=0):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param angle: Counter-clockwise angle in degrees (default=0)
        :type angle: float
        """
        self.scan = scan
        self.angle = angle * (math.pi / 180.0)
        # The angle is fixed, so hoist the trig out of the per-point path
        self._ca = math.cos(self.angle)
        self._sa = math.sin(self.angle)
    def __iter__(self):
        return self
    def next(self):
        """Return the next point rotated about the origin.
        """
        x, y = next(self.scan)
        return (self._ca * x - self._sa * y,
                self._sa * x + self._ca * y)
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
class sample(object):
    """Randomly sample points at the given probability.
    """
    def __init__(self, scan, probability=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param probability: Sampling probability in interval [0,1] (default=1)
        :type probability: float
        """
        if probability < 0 or probability > 1:
            raise ValueError("Sampling probability must be in range [0,1]")
        self.scan = scan
        self.probability = probability
    def __iter__(self):
        return self
    def next(self):
        """Return the next point that survives the coin flip.
        """
        # probability == 1 short-circuit avoids consuming RNG state
        if self.probability == 1:
            x, y = next(self.scan)
        else:
            while True:
                x, y = next(self.scan)
                if random.random() <= self.probability: break
        return x, y
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
class scale(object):
    """Scale coordinates by given factor
    """
    def __init__(self, scan, sx=1, sy=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param sx: x-coordinate scale factor (default=1)
        :type sx: float
        :param sy: y-coordinate scale factor (default=1)
        :type sy: float
        """
        if sx <= 0: raise ValueError("X-scale must be positive")
        if sy <= 0: raise ValueError("Y-scale must be positive")
        self.scan = scan
        self.sx = sx
        self.sy = sy
    def __iter__(self):
        return self
    def next(self):
        """Return the next point with both coordinates scaled.
        """
        x, y = next(self.scan)
        return self.sx * x, self.sy * y
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
class skip(object):
    """Skip points at the given step size
    """
    # sys.maxint was removed in Python 3; fall back to sys.maxsize so the
    # default stop index works on both interpreters.
    _BIG = getattr(sys, 'maxint', sys.maxsize)
    def __init__(self, scan, start=0, stop=_BIG, step=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param start: Iteration starting 0-based index (default = 0)
        :type start: int
        :param stop: Iteration stopping 0-based index (default = sys.maxsize)
        :type stop: int
        :param step: Iteration step size (default = 1)
        :type step: int
        """
        if start < 0: raise ValueError("Start must be non-negative")
        if stop < 0: raise ValueError("Stop must be non-negative")
        if stop < start: raise ValueError("Stop must be greater than start")
        if step <= 0: raise ValueError("Step must be positive")
        self.scan = scan
        self.start = start
        self.stop = stop
        self.step = step
        # 0-based index of the point most recently read from scan
        self.index = -1
    def __iter__(self):
        return self
    def next(self):
        """Return the next point whose index is start, start+step, ...
        """
        while True:
            x, y = next(self.scan)
            self.index += 1
            if (self.index < self.start): continue
            if (self.index > self.stop): raise StopIteration("skip stopping")
            if ((self.index-self.start) % self.step != 0): continue
            return x, y
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
class snap(object):
    """Snap x and y coordinates to a grid point
    """
    def __init__(self, scan):
        """
        :param scan: Pixel scan generator
        :type scan: function
        """
        self.scan = scan
    def __iter__(self):
        return self
    def next(self):
        """Return the next point rounded to the nearest integer grid node.
        """
        x, y = next(self.scan)
        return int(round(x)), int(round(y))
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
class swap(object):
    """Swap x and y coordinates
    """
    def __init__(self, scan):
        """
        :param scan: Pixel scan generator
        :type scan: function
        """
        self.scan = scan
    def __iter__(self):
        return self
    def next(self):
        """Return the next point with its coordinates transposed.
        """
        x, y = next(self.scan)
        return y, x
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
class translation(object):
    """Translate coordinates by given offset
    """
    def __init__(self, scan, tx=0, ty=0):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param tx: x-coordinate translation offset (default = 0)
        :type tx: float
        :param ty: y-coordinate translation offset (default = 0)
        :type ty: float
        """
        self.scan = scan
        self.tx = tx
        self.ty = ty
    def __iter__(self):
        return self
    def next(self):
        """Return the next point shifted by (tx, ty).
        """
        x, y = next(self.scan)
        return x + self.tx, y + self.ty
    # Python 3 spells the iterator protocol method __next__
    __next__ = next
# ======================================================================
# Scan patterns
# ----------------------------------------------------------------------
def circlescan(x0, y0, r1, r2):
    """Scan pixels in a circle pattern around a center point, one ring at a
    time from radius r1 to r2 (inclusive), each ring traced octant by octant.
    NOTE(review): despite the ``float`` types below, r1 and r2 are passed to
    ``range`` so they must be integers in practice — confirm intent.
    :param x0: Center x-coordinate
    :type x0: float
    :param y0: Center y-coordinate
    :type y0: float
    :param r1: Initial radius
    :type r1: float
    :param r2: Final radius
    :type r2: float
    :returns: Coordinate generator
    :rtype: function
    """
    # Validate inputs
    if r1 < 0: raise ValueError("Initial radius must be non-negative")
    if r2 < 0: raise ValueError("Final radius must be non-negative")
    # List of pixels visited in previous diameter
    previous = []
    # Scan distances outward (1) or inward (-1)
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        if distance == 0:
            # Degenerate ring: just the center point
            yield x0, y0
        else:
            # Computes points for first octant and the rotate by multiples of
            # 45 degrees to compute the other octants.
            # a approximates sin(45 deg) = cos(45 deg) = sqrt(2)/2, so the
            # odd-numbered entries are 45-degree rotation matrices.
            a = 0.707107
            rotations = {0: [[ 1, 0], [ 0, 1]],
                         1: [[ a, a], [-a, a]],
                         2: [[ 0, 1], [-1, 0]],
                         3: [[-a, a], [-a,-a]],
                         4: [[-1, 0], [ 0,-1]],
                         5: [[-a,-a], [ a,-a]],
                         6: [[ 0,-1], [ 1, 0]],
                         7: [[ a,-a], [ a, a]]}
            nangles = len(rotations)
            # List of pixels visited in current diameter
            current = []
            for angle in range(nangles):
                # Trace the first octant (x from 0 up to the 45-degree line)
                # and map each point through this octant's rotation matrix
                x = 0
                y = distance
                d = 1 - distance
                while x < y:
                    xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y
                    yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y
                    xr = x0 + xr
                    yr = y0 + yr
                    # First check if point was in previous diameter
                    # since our scan pattern can lead to duplicates in
                    # neighboring diameters (only the immediately preceding
                    # ring is checked, using rounded integer coordinates)
                    point = (int(round(xr)), int(round(yr)))
                    if point not in previous:
                        yield xr, yr
                        current.append(point)
                    # Move pixel according to circle constraint — a
                    # midpoint-circle-style decision variable: d < 0 keeps y,
                    # otherwise y steps toward the diagonal
                    if (d < 0):
                        d += 3 + 2 * x
                    else:
                        d += 5 - 2 * (y-x)
                        y -= 1
                    x += 1
            previous = current
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
    """Scan pixels row by row over a rectangular grid: sweep the
    x-coordinate for each y-coordinate, endpoints inclusive.
    :param xi: Initial x-coordinate
    :type xi: int
    :param yi: Initial y-coordinate
    :type yi: int
    :param xf: Final x-coordinate
    :type xf: int
    :param yf: Final y-coordinate
    :type yf: int
    :param stepx: Step size in x-coordinate
    :type stepx: int
    :param stepy: Step size in y-coordinate
    :type stepy: int
    :returns: Coordinate generator
    :rtype: function
    """
    if stepx <= 0:
        raise ValueError("X-step must be positive")
    if stepy <= 0:
        raise ValueError("Y-step must be positive")
    # Walk toward the final coordinates regardless of scan direction
    dx = -stepx if xf < xi else stepx
    dy = -stepy if yf < yi else stepy
    for y in range(yi, yf + dy, dy):
        for x in range(xi, xf + dx, dx):
            yield x, y
def hilbertscan(size, distance):
    """Scan pixels in a Hilbert curve pattern in the first quadrant. Modified
    algorithm from https://en.wikipedia.org/wiki/Hilbert_curve.
    :param size: Size of enclosing square
    :type size: int
    :param distance: Distance along curve (Must be smaller than size**2 - 1)
    :type distance: int
    :returns: Coordinate generator
    :rtype: function
    """
    def _rot(n, x, y, rx, ry):
        # Rotate/flip a quadrant (same transform as module-level hilbertrot);
        # nested here so the generator is self-contained.
        if ry == 0:
            if rx == 1:
                x = n - 1 - x
                y = n - 1 - y
            return y, x
        return x, y
    # Round the enclosing square up to the next power of two
    size = 2*(1<<(size-1).bit_length())
    if (distance > size**2 - 1):
        # Raising StopIteration inside a generator became a RuntimeError in
        # Python 3.7 (PEP 479); a plain return ends iteration identically.
        return
    for d in range(distance):
        t = d
        x = 0
        y = 0
        s = 1
        while (s < size):
            # Floor division: `t / 2` is float division on Python 3 and would
            # make the bitwise & fail; // matches the Python 2 behavior.
            rx = 1 & (t // 2)
            ry = 1 & (t ^ rx)
            x, y = _rot(s, x, y, rx, ry)
            x += s * rx
            y += s * ry
            t //= 4
            s *= 2
        yield x, y
def ringscan(x0, y0, r1, r2, metric=chebyshev):
    """Scan pixels in a ring pattern around a center point clockwise, one
    ring (level set of ``metric``) at a time from radius r1 to r2 inclusive.
    :param x0: Center x-coordinate
    :type x0: int
    :param y0: Center y-coordinate
    :type y0: int
    :param r1: Initial radius
    :type r1: int
    :param r2: Final radius
    :type r2: int
    :param metric: Distance metric
    :type metric: function
    :returns: Coordinate generator
    :rtype: function
    """
    # Validate inputs
    if r1 < 0: raise ValueError("Initial radius must be non-negative")
    if r2 < 0: raise ValueError("Final radius must be non-negative")
    if not hasattr(metric, "__call__"): raise TypeError("Metric not callable")
    # Define clockwise step directions (8-connected neighborhood).
    # NOTE(review): `direction` deliberately persists across rings, so each
    # new ring resumes from the last heading — confirm this is intended.
    direction = 0
    steps = {0: [ 1, 0],
             1: [ 1,-1],
             2: [ 0,-1],
             3: [-1,-1],
             4: [-1, 0],
             5: [-1, 1],
             6: [ 0, 1],
             7: [ 1, 1]}
    nsteps = len(steps)
    center = [x0, y0]
    # Scan distances outward (1) or inward (-1)
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        # Each ring starts directly above the center at [x0, y0 + distance]
        initial = [x0, y0 + distance]
        current = initial
        # Number of tries to find a valid neighbor on the current ring
        ntrys = 0
        while True:
            # Short-circuit special case: radius-0 ring is just the center
            if distance == 0:
                yield current[0], current[1]
                break
            # Try and take a step and check if still within distance
            nextpoint = [current[i] + steps[direction][i] for i in range(2)]
            if metric(center, nextpoint) != distance:
                # Check if we tried all step directions and failed
                ntrys += 1
                if ntrys == nsteps:
                    break
                # Try the next direction
                direction = (direction + 1) % nsteps
                continue
            ntrys = 0
            yield current[0], current[1]
            # Check if we have come all the way around
            current = nextpoint
            if current == initial:
                break
            # Check if we tried all step directions and failed.
            # NOTE(review): ntrys was just reset to 0 above, so this branch
            # looks unreachable — possibly dead code; verify before removing.
            if ntrys == nsteps:
                break
def snakescan(xi, yi, xf, yf):
    """Scan pixels in a boustrophedon (snake) pattern: sweep the full row of
    x-coordinates, then reverse the x-direction for the next row.
    :param xi: Initial x-coordinate
    :type xi: int
    :param yi: Initial y-coordinate
    :type yi: int
    :param xf: Final x-coordinate
    :type xf: int
    :param yf: Final y-coordinate
    :type yf: int
    :returns: Coordinate generator
    :rtype: function
    """
    # Step toward the final coordinates regardless of scan direction
    dx = 1 if xf >= xi else -1
    dy = 1 if yf >= yi else -1
    # x tracks the last emitted column so we know when a row was completed
    x, xa, xb = xi, xi, xf
    for y in range(yi, yf + dy, dy):
        for x in range(xa, xb + dx, dx):
            yield x, y
        # Row finished at an endpoint: reverse direction and swap endpoints
        if x in (xa, xb):
            dx = -dx
            xa, xb = xb, xa
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
    """Scan pixels along an unbounded random walk with the given per-step
    direction probabilities (normalized to sum to 1). Pair with a skip
    transformation ('stop' set) or a clip transformation ('abort' True)
    to terminate the otherwise infinite iteration.
    :param x0: Initial x-coordinate
    :type x0: int
    :param y0: Initial y-coordinate
    :type y0: int
    :param xn: Probability of moving in the negative x direction
    :type xn: float
    :param xp: Probability of moving in the positive x direction
    :type xp: float
    :param yn: Probability of moving in the negative y direction
    :type yn: float
    :param yp: Probability of moving in the positive y direction
    :type yp: float
    """
    # Validate inputs
    if xn < 0: raise ValueError("Negative x probabilty must be non-negative")
    if xp < 0: raise ValueError("Positive x probabilty must be non-negative")
    if yn < 0: raise ValueError("Negative y probabilty must be non-negative")
    if yp < 0: raise ValueError("Positive y probabilty must be non-negative")
    # Normalize the probabilities
    total = xp + xn + yp + yn
    xn /= total
    xp /= total
    yn /= total
    yp /= total
    # Cumulative thresholds: [0,cxn] -> -x, (cxn,cxp] -> +x,
    # (cxp,cyn] -> -y, anything above -> +y
    cxn = xn
    cxp = cxn + xp
    cyn = cxp + yn
    x, y = x0, y0
    while True:
        yield x, y
        # Draw one uniform sample and map it to a unit step
        roll = random.random()
        dx, dy = ((-1, 0) if roll <= cxn else
                  (1, 0) if roll <= cxp else
                  (0, -1) if roll <= cyn else
                  (0, 1))
        x += dx
        y += dy
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
gridscan
|
python
|
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
    """Scan pixels in a grid pattern along the x-coordinate then y-coordinate,
    endpoints inclusive.
    :param xi: Initial x-coordinate
    :type xi: int
    :param yi: Initial y-coordinate
    :type yi: int
    :param xf: Final x-coordinate
    :type xf: int
    :param yf: Final y-coordinate
    :type yf: int
    :param stepx: Step size in x-coordinate
    :type stepx: int
    :param stepy: Step size in y-coordinate
    :type stepy: int
    :returns: Coordinate generator
    :rtype: function
    :raises ValueError: if either step size is not positive
    """
    if stepx <= 0: raise ValueError("X-step must be positive")
    if stepy <= 0: raise ValueError("Y-step must be positive")
    # Determine direction to move so the walk always heads toward (xf, yf)
    dx = stepx if xf >= xi else -stepx
    dy = stepy if yf >= yi else -stepy
    for y in range(yi, yf + dy, dy):
        for x in range(xi, xf + dx, dx):
            yield x, y
|
Scan pixels in a grid pattern along the x-coordinate then y-coordinate
:param xi: Initial x-coordinate
:type xi: int
:param yi: Initial y-coordinate
:type yi: int
:param xf: Final x-coordinate
:type xf: int
:param yf: Final y-coordinate
:type yf: int
:param stepx: Step size in x-coordinate
:type stepx: int
:param stepy: Step size in y-coordinate
:type stepy: int
:returns: Coordinate generator
:rtype: function
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L468-L496
| null |
#!/usr/bin/python
# AUTHOR
# Daniel Pulido <dpmcmlxxvi@gmail.com>
# COPYRIGHT
# Copyright (c) 2015 Daniel Pulido <dpmcmlxxvi@gmail.com>
# LICENSE
# MIT License (http://opensource.org/licenses/MIT)
"""
Various patterns to scan pixels on a grid. Rectangular patterns are scanned
first along the x-coordinate then the y-coordinate. Radial patterns are
scanned clockwise. Transformation filters are available to apply
standard transformations (e.g., rotation, scale, translation) on the
coordinates.
"""
import math
import random
import sys
# ======================================================================
# Distance metrics
# ----------------------------------------------------------------------
def chebyshev(point1, point2):
"""Computes distance between 2D points using chebyshev metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return max(abs(point1[0] - point2[0]), abs(point1[1] - point2[1]))
def manhattan(point1, point2):
"""Computes distance between 2D points using manhattan metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return abs(point1[0] - point2[0]) + abs(point1[1] - point2[1])
def hilbertrot(n, x, y, rx, ry):
"""Rotates and flips a quadrant appropriately for the Hilbert scan
generator. See https://en.wikipedia.org/wiki/Hilbert_curve.
"""
if ry == 0:
if rx == 1:
x = n - 1 - x
y = n - 1 - y
return y, x
return x, y
# ======================================================================
# Scan transformations
# ----------------------------------------------------------------------
class clip(object):
"""Clip coordinates that exceed boundary
"""
def __init__(self,
scan,
minx=-sys.maxint,
maxx=sys.maxint,
miny=-sys.maxint,
maxy=sys.maxint,
predicate=None,
abort=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param minx: Minimum x-coordinate (default = -sys.maxint)
:type minx: int
:param maxx: Maximum x-coordinate (default = sys.maxint)
:type maxx: int
:param miny: Minimum y-coordinate (default = -sys.maxint)
:type miny: int
:param maxy: Maximum y-coordinate (default = sys.maxint)
:type maxy: int
:param predicate: Optional function that takes 2 arguments (x and y)
and returns true if coordinate should be kept
otherwise false (default = None)
:type predicate: function
:param abort: Abort iteration if boundary is crossed
:type abort: bool
"""
self.scan = scan
self.minx = minx
self.maxx = maxx
self.miny = miny
self.maxy = maxy
self.predicate = predicate
self.abort = abort
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
if self.predicate is not None and not self.predicate(x,y):
if self.abort: raise StopIteration("Boundary crossed!")
elif (x < self.minx or
x > self.maxx or
y < self.miny or
y > self.maxy):
if self.abort: raise StopIteration("Boundary crossed!")
else:
return x, y
class reflection(object):
"""Reflect coordinates about x and y axes
"""
def __init__(self, scan, rx=False, ry=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param rx: True if x-coordinate should be reflected (default=False)
:type rx: bool
:param ry: True if y-coordinate should be reflected (default=False)
:type ry: bool
"""
self.scan = scan
self.rx = rx
self.ry = ry
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = -x if self.rx else x
yr = -y if self.ry else y
return xr, yr
class reservoir(object):
def __init__(self, scan, npoints):
"""Randomly sample points using the reservoir sampling method. This is
only useful if you need exactly 'npoints' sampled. Otherwise use the
'sample' transformation to randomly sample at a given rate. This method
requires storing 'npoints' in memory and precomputing the random
selection so it may be slower than 'sample'.
:param scan: Pixel scan generator
:type scan: function
:param npoints: Sample size
:type npoints: int
"""
# Validate inputs
if npoints <= 0: raise ValueError("Sample size must be positive")
self.reservoir = []
self.count = 0
# Populate reservoir
for index, point in enumerate(scan):
if index < npoints:
self.reservoir.append(point)
else:
j = random.randint(0, index)
if j < npoints:
self.reservoir[j] = point
# Shuffle the reservoir in case population was small and the
# points were not sufficiently randomized
random.shuffle(self.reservoir)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.count < len(self.reservoir):
self.count += 1
return self.reservoir[self.count-1]
raise StopIteration("Reservoir exhausted")
class rotation(object):
"""Rotate coordinates by given angle. If the final transformation axes do
not align with the x and y axes then it may yield duplicate coordinates
during scanning.
"""
def __init__(self, scan, angle=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param angle: Counter-clockwise angle in degrees (default=0)
:type angle: float
"""
self.scan = scan
self.angle = angle * (math.pi / 180.0)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
ca, sa = math.cos(self.angle), math.sin(self.angle)
xr = ca * x - sa * y
yr = sa * x + ca * y
return xr, yr
class sample(object):
"""Randomly sample points at the given probability.
"""
def __init__(self, scan, probability=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param probability: Sampling probability in interval [0,1] (default=1)
:type probability: float
"""
if probability < 0 or probability > 1:
raise ValueError("Sampling probability must be in range [0,1]")
self.scan = scan
self.probability = probability
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.probability == 1:
x, y = next(self.scan)
else:
while True:
x, y = next(self.scan)
if random.random() <= self.probability: break
return x, y
class scale(object):
"""Scale coordinates by given factor
"""
def __init__(self, scan, sx=1, sy=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate scale factor (default=1)
:type sx: float
:param sy: y-coordinate scale factor (default=1)
:type sy: float
"""
if sx <= 0: raise ValueError("X-scale must be positive")
if sy <= 0: raise ValueError("Y-scale must be positive")
self.scan = scan
self.sx = sx
self.sy = sy
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = self.sx * x
yr = self.sy * y
return xr, yr
class skip(object):
"""Skip points at the given step size
"""
def __init__(self, scan, start=0, stop=sys.maxint, step=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param start: Iteration starting 0-based index (default = 0)
:type start: int
:param stop: Iteration stopping 0-based index (default = sys.maxint)
:type stop: int
:param step: Iteration step size (default = 1)
:type step: int
"""
if start < 0: raise ValueError("Start must be non-negative")
if stop < 0: raise ValueError("Stop must be non-negative")
if stop < start: raise ValueError("Stop must be greater than start")
if step <= 0: raise ValueError("Step must be positive")
self.scan = scan
self.start = start
self.stop = stop
self.step = step
self.index = -1
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
self.index += 1
if (self.index < self.start): continue
if (self.index > self.stop): raise StopIteration("skip stopping")
if ((self.index-self.start) % self.step != 0): continue
return x, y
class snap(object):
"""Snap x and y coordinates to a grid point
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xs = int(round(x))
ys = int(round(y))
return xs, ys
class swap(object):
"""Swap x and y coordinates
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
return y, x
class translation(object):
"""Translate coordinates by given offset
"""
def __init__(self, scan, tx=0, ty=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate translation offset (default = 0)
:type sx: float
:param sy: y-coordinate translaation offset (default = 0)
:type sy: float
"""
self.scan = scan
self.tx = tx
self.ty = ty
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = x + self.tx
yr = y + self.ty
return xr, yr
# ======================================================================
# Scan patterns
# ----------------------------------------------------------------------
def circlescan(x0, y0, r1, r2):
"""Scan pixels in a circle pattern around a center point
:param x0: Center x-coordinate
:type x0: float
:param y0: Center y-coordinate
:type y0: float
:param r1: Initial radius
:type r1: float
:param r2: Final radius
:type r2: float
:returns: Coordinate generator
:rtype: function
"""
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
# List of pixels visited in previous diameter
previous = []
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
if distance == 0:
yield x0, y0
else:
# Computes points for first octant and the rotate by multiples of
# 45 degrees to compute the other octants
a = 0.707107
rotations = {0: [[ 1, 0], [ 0, 1]],
1: [[ a, a], [-a, a]],
2: [[ 0, 1], [-1, 0]],
3: [[-a, a], [-a,-a]],
4: [[-1, 0], [ 0,-1]],
5: [[-a,-a], [ a,-a]],
6: [[ 0,-1], [ 1, 0]],
7: [[ a,-a], [ a, a]]}
nangles = len(rotations)
# List of pixels visited in current diameter
current = []
for angle in range(nangles):
x = 0
y = distance
d = 1 - distance
while x < y:
xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y
yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y
xr = x0 + xr
yr = y0 + yr
# First check if point was in previous diameter
# since our scan pattern can lead to duplicates in
# neighboring diameters
point = (int(round(xr)), int(round(yr)))
if point not in previous:
yield xr, yr
current.append(point)
# Move pixel according to circle constraint
if (d < 0):
d += 3 + 2 * x
else:
d += 5 - 2 * (y-x)
y -= 1
x += 1
previous = current
def hilbertscan(size, distance):
"""Scan pixels in a Hilbert curve pattern in the first quadrant. Modified
algorithm from https://en.wikipedia.org/wiki/Hilbert_curve.
:param size: Size of enclosing square
:type size: int
:param distance: Distance along curve (Must be smaller than size**2 - 1)
:type distance: int
:returns: Coordinate generator
:rtype: function
"""
size = 2*(1<<(size-1).bit_length());
if (distance > size**2 - 1): raise StopIteration("Invalid distance!")
for d in range(distance):
t = d
x = 0
y = 0
s = 1
while (s < size):
rx = 1 & (t / 2)
ry = 1 & (t ^ rx)
x, y = hilbertrot(s, x, y, rx, ry)
x += s * rx
y += s * ry
t /= 4
s *= 2
yield x, y
def ringscan(x0, y0, r1, r2, metric=chebyshev):
"""Scan pixels in a ring pattern around a center point clockwise
:param x0: Center x-coordinate
:type x0: int
:param y0: Center y-coordinate
:type y0: int
:param r1: Initial radius
:type r1: int
:param r2: Final radius
:type r2: int
:param metric: Distance metric
:type metric: function
:returns: Coordinate generator
:rtype: function
"""
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
if not hasattr(metric, "__call__"): raise TypeError("Metric not callable")
# Define clockwise step directions
direction = 0
steps = {0: [ 1, 0],
1: [ 1,-1],
2: [ 0,-1],
3: [-1,-1],
4: [-1, 0],
5: [-1, 1],
6: [ 0, 1],
7: [ 1, 1]}
nsteps = len(steps)
center = [x0, y0]
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
initial = [x0, y0 + distance]
current = initial
# Number of tries to find a valid neighrbor
ntrys = 0
while True:
# Short-circuit special case
if distance == 0:
yield current[0], current[1]
break
# Try and take a step and check if still within distance
nextpoint = [current[i] + steps[direction][i] for i in range(2)]
if metric(center, nextpoint) != distance:
# Check if we tried all step directions and failed
ntrys += 1
if ntrys == nsteps:
break
# Try the next direction
direction = (direction + 1) % nsteps
continue
ntrys = 0
yield current[0], current[1]
# Check if we have come all the way around
current = nextpoint
if current == initial:
break
# Check if we tried all step directions and failed
if ntrys == nsteps:
break
def snakescan(xi, yi, xf, yf):
"""Scan pixels in a snake pattern along the x-coordinate then y-coordinate
:param xi: Initial x-coordinate
:type xi: int
:param yi: Initial y-coordinate
:type yi: int
:param xf: Final x-coordinate
:type xf: int
:param yf: Final y-coordinate
:type yf: int
:returns: Coordinate generator
:rtype: function
"""
# Determine direction to move
dx = 1 if xf >= xi else -1
dy = 1 if yf >= yi else -1
# Scan pixels first along x-coordinate then y-coordinate and flip
# x-direction when the end of the line is reached
x, xa, xb = xi, xi, xf
for y in range(yi, yf + dy, dy):
for x in range(xa, xb + dx, dx):
yield x, y
# Swap x-direction
if x == xa or x == xb:
dx *= -1
xa, xb = xb, xa
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
"""Scan pixels in a random walk pattern with given step probabilities. The
random walk will continue indefinitely unless a skip transformation is used
with the 'stop' parameter set or a clip transformation is used with the
'abort' parameter set to True. The probabilities are normalized to sum to 1.
:param x0: Initial x-coordinate
:type x0: int
:param y0: Initial y-coordinate
:type y0: int
:param xn: Probability of moving in the negative x direction
:type xn: float
:param xp: Probability of moving in the positive x direction
:type xp: float
:param yn: Probability of moving in the negative y direction
:type yn: float
:param yp: Probability of moving in the positive y direction
:type yp: float
"""
# Validate inputs
if xn < 0: raise ValueError("Negative x probabilty must be non-negative")
if xp < 0: raise ValueError("Positive x probabilty must be non-negative")
if yn < 0: raise ValueError("Negative y probabilty must be non-negative")
if yp < 0: raise ValueError("Positive y probabilty must be non-negative")
# Compute normalized probability
total = xp + xn + yp + yn
xn /= total
xp /= total
yn /= total
yp /= total
# Compute cumulative probability
cxn = xn
cxp = cxn + xp
cyn = cxp + yn
# Initialize position
x, y = x0, y0
while True:
yield x, y
# Take random step
probability = random.random()
if probability <= cxn:
x -= 1
elif probability <= cxp:
x += 1
elif probability <= cyn:
y -= 1
else:
y += 1
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
hilbertscan
|
python
|
def hilbertscan(size, distance):
    """Scan pixels in a Hilbert curve pattern in the first quadrant. Modified
    algorithm from https://en.wikipedia.org/wiki/Hilbert_curve.
    :param size: Size of enclosing square
    :type size: int
    :param distance: Distance along curve (Must be smaller than size**2 - 1)
    :type distance: int
    :returns: Coordinate generator
    :rtype: function
    """
    def _rot(n, x, y, rx, ry):
        # Rotate/flip a quadrant (same transform as module-level hilbertrot);
        # nested here so the generator is self-contained.
        if ry == 0:
            if rx == 1:
                x = n - 1 - x
                y = n - 1 - y
            return y, x
        return x, y
    # Round the enclosing square up to the next power of two
    size = 2*(1<<(size-1).bit_length())
    if (distance > size**2 - 1):
        # Raising StopIteration inside a generator became a RuntimeError in
        # Python 3.7 (PEP 479); a plain return ends iteration identically.
        return
    for d in range(distance):
        t = d
        x = 0
        y = 0
        s = 1
        while (s < size):
            # Floor division: `t / 2` is float division on Python 3 and would
            # make the bitwise & fail; // matches the Python 2 behavior.
            rx = 1 & (t // 2)
            ry = 1 & (t ^ rx)
            x, y = _rot(s, x, y, rx, ry)
            x += s * rx
            y += s * ry
            t //= 4
            s *= 2
        yield x, y
|
Scan pixels in a Hilbert curve pattern in the first quadrant. Modified
algorithm from https://en.wikipedia.org/wiki/Hilbert_curve.
:param size: Size of enclosing square
:type size: int
:param distance: Distance along curve (Must be smaller than size**2 - 1)
:type distance: int
:returns: Coordinate generator
:rtype: function
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L498-L526
|
[
"def hilbertrot(n, x, y, rx, ry):\n \"\"\"Rotates and flips a quadrant appropriately for the Hilbert scan\n generator. See https://en.wikipedia.org/wiki/Hilbert_curve.\n \"\"\"\n if ry == 0:\n if rx == 1:\n x = n - 1 - x\n y = n - 1 - y\n return y, x\n return x, y\n"
] |
#!/usr/bin/python
# AUTHOR
# Daniel Pulido <dpmcmlxxvi@gmail.com>
# COPYRIGHT
# Copyright (c) 2015 Daniel Pulido <dpmcmlxxvi@gmail.com>
# LICENSE
# MIT License (http://opensource.org/licenses/MIT)
"""
Various patterns to scan pixels on a grid. Rectangular patterns are scanned
first along the x-coordinate then the y-coordinate. Radial patterns are
scanned clockwise. Transformation filters are available to apply
standard transformations (e.g., rotation, scale, translation) on the
coordinates.
"""
import math
import random
import sys
# ======================================================================
# Distance metrics
# ----------------------------------------------------------------------
def chebyshev(point1, point2):
"""Computes distance between 2D points using chebyshev metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return max(abs(point1[0] - point2[0]), abs(point1[1] - point2[1]))
def manhattan(point1, point2):
"""Computes distance between 2D points using manhattan metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return abs(point1[0] - point2[0]) + abs(point1[1] - point2[1])
def hilbertrot(n, x, y, rx, ry):
"""Rotates and flips a quadrant appropriately for the Hilbert scan
generator. See https://en.wikipedia.org/wiki/Hilbert_curve.
"""
if ry == 0:
if rx == 1:
x = n - 1 - x
y = n - 1 - y
return y, x
return x, y
# ======================================================================
# Scan transformations
# ----------------------------------------------------------------------
class clip(object):
"""Clip coordinates that exceed boundary
"""
def __init__(self,
scan,
minx=-sys.maxint,
maxx=sys.maxint,
miny=-sys.maxint,
maxy=sys.maxint,
predicate=None,
abort=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param minx: Minimum x-coordinate (default = -sys.maxint)
:type minx: int
:param maxx: Maximum x-coordinate (default = sys.maxint)
:type maxx: int
:param miny: Minimum y-coordinate (default = -sys.maxint)
:type miny: int
:param maxy: Maximum y-coordinate (default = sys.maxint)
:type maxy: int
:param predicate: Optional function that takes 2 arguments (x and y)
and returns true if coordinate should be kept
otherwise false (default = None)
:type predicate: function
:param abort: Abort iteration if boundary is crossed
:type abort: bool
"""
self.scan = scan
self.minx = minx
self.maxx = maxx
self.miny = miny
self.maxy = maxy
self.predicate = predicate
self.abort = abort
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
if self.predicate is not None and not self.predicate(x,y):
if self.abort: raise StopIteration("Boundary crossed!")
elif (x < self.minx or
x > self.maxx or
y < self.miny or
y > self.maxy):
if self.abort: raise StopIteration("Boundary crossed!")
else:
return x, y
class reflection(object):
"""Reflect coordinates about x and y axes
"""
def __init__(self, scan, rx=False, ry=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param rx: True if x-coordinate should be reflected (default=False)
:type rx: bool
:param ry: True if y-coordinate should be reflected (default=False)
:type ry: bool
"""
self.scan = scan
self.rx = rx
self.ry = ry
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = -x if self.rx else x
yr = -y if self.ry else y
return xr, yr
class reservoir(object):
def __init__(self, scan, npoints):
"""Randomly sample points using the reservoir sampling method. This is
only useful if you need exactly 'npoints' sampled. Otherwise use the
'sample' transformation to randomly sample at a given rate. This method
requires storing 'npoints' in memory and precomputing the random
selection so it may be slower than 'sample'.
:param scan: Pixel scan generator
:type scan: function
:param npoints: Sample size
:type npoints: int
"""
# Validate inputs
if npoints <= 0: raise ValueError("Sample size must be positive")
self.reservoir = []
self.count = 0
# Populate reservoir
for index, point in enumerate(scan):
if index < npoints:
self.reservoir.append(point)
else:
j = random.randint(0, index)
if j < npoints:
self.reservoir[j] = point
# Shuffle the reservoir in case population was small and the
# points were not sufficiently randomized
random.shuffle(self.reservoir)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.count < len(self.reservoir):
self.count += 1
return self.reservoir[self.count-1]
raise StopIteration("Reservoir exhausted")
class rotation(object):
"""Rotate coordinates by given angle. If the final transformation axes do
not align with the x and y axes then it may yield duplicate coordinates
during scanning.
"""
def __init__(self, scan, angle=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param angle: Counter-clockwise angle in degrees (default=0)
:type angle: float
"""
self.scan = scan
self.angle = angle * (math.pi / 180.0)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
ca, sa = math.cos(self.angle), math.sin(self.angle)
xr = ca * x - sa * y
yr = sa * x + ca * y
return xr, yr
class sample(object):
"""Randomly sample points at the given probability.
"""
def __init__(self, scan, probability=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param probability: Sampling probability in interval [0,1] (default=1)
:type probability: float
"""
if probability < 0 or probability > 1:
raise ValueError("Sampling probability must be in range [0,1]")
self.scan = scan
self.probability = probability
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.probability == 1:
x, y = next(self.scan)
else:
while True:
x, y = next(self.scan)
if random.random() <= self.probability: break
return x, y
class scale(object):
"""Scale coordinates by given factor
"""
def __init__(self, scan, sx=1, sy=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate scale factor (default=1)
:type sx: float
:param sy: y-coordinate scale factor (default=1)
:type sy: float
"""
if sx <= 0: raise ValueError("X-scale must be positive")
if sy <= 0: raise ValueError("Y-scale must be positive")
self.scan = scan
self.sx = sx
self.sy = sy
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = self.sx * x
yr = self.sy * y
return xr, yr
class skip(object):
"""Skip points at the given step size
"""
def __init__(self, scan, start=0, stop=sys.maxint, step=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param start: Iteration starting 0-based index (default = 0)
:type start: int
:param stop: Iteration stopping 0-based index (default = sys.maxint)
:type stop: int
:param step: Iteration step size (default = 1)
:type step: int
"""
if start < 0: raise ValueError("Start must be non-negative")
if stop < 0: raise ValueError("Stop must be non-negative")
if stop < start: raise ValueError("Stop must be greater than start")
if step <= 0: raise ValueError("Step must be positive")
self.scan = scan
self.start = start
self.stop = stop
self.step = step
self.index = -1
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
self.index += 1
if (self.index < self.start): continue
if (self.index > self.stop): raise StopIteration("skip stopping")
if ((self.index-self.start) % self.step != 0): continue
return x, y
class snap(object):
"""Snap x and y coordinates to a grid point
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xs = int(round(x))
ys = int(round(y))
return xs, ys
class swap(object):
"""Swap x and y coordinates
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
return y, x
class translation(object):
"""Translate coordinates by given offset
"""
def __init__(self, scan, tx=0, ty=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate translation offset (default = 0)
:type sx: float
:param sy: y-coordinate translaation offset (default = 0)
:type sy: float
"""
self.scan = scan
self.tx = tx
self.ty = ty
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = x + self.tx
yr = y + self.ty
return xr, yr
# ======================================================================
# Scan patterns
# ----------------------------------------------------------------------
def circlescan(x0, y0, r1, r2):
"""Scan pixels in a circle pattern around a center point
:param x0: Center x-coordinate
:type x0: float
:param y0: Center y-coordinate
:type y0: float
:param r1: Initial radius
:type r1: float
:param r2: Final radius
:type r2: float
:returns: Coordinate generator
:rtype: function
"""
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
# List of pixels visited in previous diameter
previous = []
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
if distance == 0:
yield x0, y0
else:
# Computes points for first octant and the rotate by multiples of
# 45 degrees to compute the other octants
a = 0.707107
rotations = {0: [[ 1, 0], [ 0, 1]],
1: [[ a, a], [-a, a]],
2: [[ 0, 1], [-1, 0]],
3: [[-a, a], [-a,-a]],
4: [[-1, 0], [ 0,-1]],
5: [[-a,-a], [ a,-a]],
6: [[ 0,-1], [ 1, 0]],
7: [[ a,-a], [ a, a]]}
nangles = len(rotations)
# List of pixels visited in current diameter
current = []
for angle in range(nangles):
x = 0
y = distance
d = 1 - distance
while x < y:
xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y
yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y
xr = x0 + xr
yr = y0 + yr
# First check if point was in previous diameter
# since our scan pattern can lead to duplicates in
# neighboring diameters
point = (int(round(xr)), int(round(yr)))
if point not in previous:
yield xr, yr
current.append(point)
# Move pixel according to circle constraint
if (d < 0):
d += 3 + 2 * x
else:
d += 5 - 2 * (y-x)
y -= 1
x += 1
previous = current
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
"""Scan pixels in a grid pattern along the x-coordinate then y-coordinate
:param xi: Initial x-coordinate
:type xi: int
:param yi: Initial y-coordinate
:type yi: int
:param xf: Final x-coordinate
:type xf: int
:param yf: Final y-coordinate
:type yf: int
:param stepx: Step size in x-coordinate
:type stepx: int
:param stepy: Step size in y-coordinate
:type stepy: int
:returns: Coordinate generator
:rtype: function
"""
if stepx <= 0: raise ValueError("X-step must be positive")
if stepy <= 0: raise ValueError("Y-step must be positive")
# Determine direction to move
dx = stepx if xf >= xi else -stepx
dy = stepy if yf >= yi else -stepy
for y in range(yi, yf + dy, dy):
for x in range(xi, xf + dx, dx):
yield x, y
def ringscan(x0, y0, r1, r2, metric=chebyshev):
"""Scan pixels in a ring pattern around a center point clockwise
:param x0: Center x-coordinate
:type x0: int
:param y0: Center y-coordinate
:type y0: int
:param r1: Initial radius
:type r1: int
:param r2: Final radius
:type r2: int
:param metric: Distance metric
:type metric: function
:returns: Coordinate generator
:rtype: function
"""
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
if not hasattr(metric, "__call__"): raise TypeError("Metric not callable")
# Define clockwise step directions
direction = 0
steps = {0: [ 1, 0],
1: [ 1,-1],
2: [ 0,-1],
3: [-1,-1],
4: [-1, 0],
5: [-1, 1],
6: [ 0, 1],
7: [ 1, 1]}
nsteps = len(steps)
center = [x0, y0]
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
initial = [x0, y0 + distance]
current = initial
# Number of tries to find a valid neighrbor
ntrys = 0
while True:
# Short-circuit special case
if distance == 0:
yield current[0], current[1]
break
# Try and take a step and check if still within distance
nextpoint = [current[i] + steps[direction][i] for i in range(2)]
if metric(center, nextpoint) != distance:
# Check if we tried all step directions and failed
ntrys += 1
if ntrys == nsteps:
break
# Try the next direction
direction = (direction + 1) % nsteps
continue
ntrys = 0
yield current[0], current[1]
# Check if we have come all the way around
current = nextpoint
if current == initial:
break
# Check if we tried all step directions and failed
if ntrys == nsteps:
break
def snakescan(xi, yi, xf, yf):
"""Scan pixels in a snake pattern along the x-coordinate then y-coordinate
:param xi: Initial x-coordinate
:type xi: int
:param yi: Initial y-coordinate
:type yi: int
:param xf: Final x-coordinate
:type xf: int
:param yf: Final y-coordinate
:type yf: int
:returns: Coordinate generator
:rtype: function
"""
# Determine direction to move
dx = 1 if xf >= xi else -1
dy = 1 if yf >= yi else -1
# Scan pixels first along x-coordinate then y-coordinate and flip
# x-direction when the end of the line is reached
x, xa, xb = xi, xi, xf
for y in range(yi, yf + dy, dy):
for x in range(xa, xb + dx, dx):
yield x, y
# Swap x-direction
if x == xa or x == xb:
dx *= -1
xa, xb = xb, xa
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
"""Scan pixels in a random walk pattern with given step probabilities. The
random walk will continue indefinitely unless a skip transformation is used
with the 'stop' parameter set or a clip transformation is used with the
'abort' parameter set to True. The probabilities are normalized to sum to 1.
:param x0: Initial x-coordinate
:type x0: int
:param y0: Initial y-coordinate
:type y0: int
:param xn: Probability of moving in the negative x direction
:type xn: float
:param xp: Probability of moving in the positive x direction
:type xp: float
:param yn: Probability of moving in the negative y direction
:type yn: float
:param yp: Probability of moving in the positive y direction
:type yp: float
"""
# Validate inputs
if xn < 0: raise ValueError("Negative x probabilty must be non-negative")
if xp < 0: raise ValueError("Positive x probabilty must be non-negative")
if yn < 0: raise ValueError("Negative y probabilty must be non-negative")
if yp < 0: raise ValueError("Positive y probabilty must be non-negative")
# Compute normalized probability
total = xp + xn + yp + yn
xn /= total
xp /= total
yn /= total
yp /= total
# Compute cumulative probability
cxn = xn
cxp = cxn + xp
cyn = cxp + yn
# Initialize position
x, y = x0, y0
while True:
yield x, y
# Take random step
probability = random.random()
if probability <= cxn:
x -= 1
elif probability <= cxp:
x += 1
elif probability <= cyn:
y -= 1
else:
y += 1
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
ringscan
|
python
|
def ringscan(x0, y0, r1, r2, metric=chebyshev):
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
if not hasattr(metric, "__call__"): raise TypeError("Metric not callable")
# Define clockwise step directions
direction = 0
steps = {0: [ 1, 0],
1: [ 1,-1],
2: [ 0,-1],
3: [-1,-1],
4: [-1, 0],
5: [-1, 1],
6: [ 0, 1],
7: [ 1, 1]}
nsteps = len(steps)
center = [x0, y0]
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
initial = [x0, y0 + distance]
current = initial
# Number of tries to find a valid neighrbor
ntrys = 0
while True:
# Short-circuit special case
if distance == 0:
yield current[0], current[1]
break
# Try and take a step and check if still within distance
nextpoint = [current[i] + steps[direction][i] for i in range(2)]
if metric(center, nextpoint) != distance:
# Check if we tried all step directions and failed
ntrys += 1
if ntrys == nsteps:
break
# Try the next direction
direction = (direction + 1) % nsteps
continue
ntrys = 0
yield current[0], current[1]
# Check if we have come all the way around
current = nextpoint
if current == initial:
break
# Check if we tried all step directions and failed
if ntrys == nsteps:
break
|
Scan pixels in a ring pattern around a center point clockwise
:param x0: Center x-coordinate
:type x0: int
:param y0: Center y-coordinate
:type y0: int
:param r1: Initial radius
:type r1: int
:param r2: Final radius
:type r2: int
:param metric: Distance metric
:type metric: function
:returns: Coordinate generator
:rtype: function
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L528-L604
|
[
"def chebyshev(point1, point2):\n \"\"\"Computes distance between 2D points using chebyshev metric\n\n :param point1: 1st point\n :type point1: list\n :param point2: 2nd point\n :type point2: list\n :returns: Distance between point1 and point2\n :rtype: float\n \"\"\"\n\n return max(abs(point1[0] - point2[0]), abs(point1[1] - point2[1]))\n",
"def manhattan(point1, point2):\n \"\"\"Computes distance between 2D points using manhattan metric\n\n :param point1: 1st point\n :type point1: list\n :param point2: 2nd point\n :type point2: list\n :returns: Distance between point1 and point2\n :rtype: float\n \"\"\"\n\n return abs(point1[0] - point2[0]) + abs(point1[1] - point2[1])\n",
"def badmetric(x, y):\n return 3\n"
] |
#!/usr/bin/python
# AUTHOR
# Daniel Pulido <dpmcmlxxvi@gmail.com>
# COPYRIGHT
# Copyright (c) 2015 Daniel Pulido <dpmcmlxxvi@gmail.com>
# LICENSE
# MIT License (http://opensource.org/licenses/MIT)
"""
Various patterns to scan pixels on a grid. Rectangular patterns are scanned
first along the x-coordinate then the y-coordinate. Radial patterns are
scanned clockwise. Transformation filters are available to apply
standard transformations (e.g., rotation, scale, translation) on the
coordinates.
"""
import math
import random
import sys
# ======================================================================
# Distance metrics
# ----------------------------------------------------------------------
def chebyshev(point1, point2):
    """Computes distance between 2D points using chebyshev metric
    :param point1: 1st point
    :type point1: list
    :param point2: 2nd point
    :type point2: list
    :returns: Distance between point1 and point2
    :rtype: float
    """
    # Chebyshev (L-infinity) distance: the larger of the per-axis deltas.
    dx = abs(point1[0] - point2[0])
    dy = abs(point1[1] - point2[1])
    return dy if dy > dx else dx
def manhattan(point1, point2):
    """Computes distance between 2D points using manhattan metric
    :param point1: 1st point
    :type point1: list
    :param point2: 2nd point
    :type point2: list
    :returns: Distance between point1 and point2
    :rtype: float
    """
    # Manhattan (L1) distance: sum of the per-axis deltas.
    dx = abs(point1[0] - point2[0])
    dy = abs(point1[1] - point2[1])
    return dx + dy
def hilbertrot(n, x, y, rx, ry):
    """Rotates and flips a quadrant appropriately for the Hilbert scan
    generator. See https://en.wikipedia.org/wiki/Hilbert_curve.
    """
    # Quadrants with ry != 0 pass through unchanged; otherwise the point
    # is (optionally mirrored when rx == 1 and then) axis-swapped.
    if ry != 0:
        return x, y
    if rx == 1:
        x, y = n - 1 - x, n - 1 - y
    return y, x
# ======================================================================
# Scan transformations
# ----------------------------------------------------------------------
class clip(object):
    """Clip coordinates that exceed boundary
    Wraps another scan generator and yields only coordinates that lie
    inside the configured bounding box and satisfy ``predicate`` (when one
    is given).  With ``abort=True`` the iteration stops at the first
    rejected point instead of skipping it.
    NOTE(review): the defaults use ``sys.maxint``, which exists only on
    Python 2.
    """
    def __init__(self,
                 scan,
                 minx=-sys.maxint,
                 maxx=sys.maxint,
                 miny=-sys.maxint,
                 maxy=sys.maxint,
                 predicate=None,
                 abort=False):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param minx: Minimum x-coordinate (default = -sys.maxint)
        :type minx: int
        :param maxx: Maximum x-coordinate (default = sys.maxint)
        :type maxx: int
        :param miny: Minimum y-coordinate (default = -sys.maxint)
        :type miny: int
        :param maxy: Maximum y-coordinate (default = sys.maxint)
        :type maxy: int
        :param predicate: Optional function that takes 2 arguments (x and y)
                          and returns true if coordinate should be kept
                          otherwise false (default = None)
        :type predicate: function
        :param abort: Abort iteration if boundary is crossed
        :type abort: bool
        """
        self.scan = scan
        self.minx = minx
        self.maxx = maxx
        self.miny = miny
        self.maxy = maxy
        self.predicate = predicate
        self.abort = abort
    def __iter__(self):
        # Python 2 iterator protocol (next(), not __next__).
        return self
    def next(self):
        """Next point in iteration
        :returns: Next accepted (x, y) point from the wrapped scan
        :raises StopIteration: when the wrapped scan is exhausted, or a
            point is rejected while ``abort`` is True
        """
        # Pull points until one passes the filters; rejected points are
        # silently skipped unless abort is set.
        while True:
            x, y = next(self.scan)
            # The predicate, when supplied, takes precedence over the box.
            if self.predicate is not None and not self.predicate(x,y):
                if self.abort: raise StopIteration("Boundary crossed!")
            elif (x < self.minx or
                  x > self.maxx or
                  y < self.miny or
                  y > self.maxy):
                if self.abort: raise StopIteration("Boundary crossed!")
            else:
                return x, y
class reflection(object):
"""Reflect coordinates about x and y axes
"""
def __init__(self, scan, rx=False, ry=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param rx: True if x-coordinate should be reflected (default=False)
:type rx: bool
:param ry: True if y-coordinate should be reflected (default=False)
:type ry: bool
"""
self.scan = scan
self.rx = rx
self.ry = ry
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = -x if self.rx else x
yr = -y if self.ry else y
return xr, yr
class reservoir(object):
    """Randomly sample a fixed number of points via reservoir sampling."""
    def __init__(self, scan, npoints):
        """Randomly sample points using the reservoir sampling method. This is
        only useful if you need exactly 'npoints' sampled. Otherwise use the
        'sample' transformation to randomly sample at a given rate. This method
        requires storing 'npoints' in memory and precomputing the random
        selection so it may be slower than 'sample'.
        :param scan: Pixel scan generator
        :type scan: function
        :param npoints: Sample size
        :type npoints: int
        :raises ValueError: if npoints is not positive
        """
        # Validate inputs
        if npoints <= 0: raise ValueError("Sample size must be positive")
        self.reservoir = []
        self.count = 0
        # Populate reservoir
        # Classic Algorithm R: keep the first npoints items, then replace a
        # random slot with probability npoints/(index+1) for later items.
        for index, point in enumerate(scan):
            if index < npoints:
                self.reservoir.append(point)
            else:
                j = random.randint(0, index)
                if j < npoints:
                    self.reservoir[j] = point
        # Shuffle the reservoir in case population was small and the
        # points were not sufficiently randomized
        random.shuffle(self.reservoir)
    def __iter__(self):
        return self
    def next(self):
        """Next point in iteration
        :raises StopIteration: once every sampled point has been returned
        """
        if self.count < len(self.reservoir):
            self.count += 1
            return self.reservoir[self.count-1]
        raise StopIteration("Reservoir exhausted")
class rotation(object):
    """Rotate coordinates by given angle. If the final transformation axes do
    not align with the x and y axes then it may yield duplicate coordinates
    during scanning.
    """
    def __init__(self, scan, angle=0):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param angle: Counter-clockwise angle in degrees (default=0)
        :type angle: float
        """
        self.scan = scan
        # Store the angle in radians for the trig calls below.
        self.angle = angle * (math.pi / 180.0)
    def __iter__(self):
        return self
    def next(self):
        """Return the next point rotated counter-clockwise about the origin."""
        x, y = next(self.scan)
        cosa = math.cos(self.angle)
        sina = math.sin(self.angle)
        return cosa * x - sina * y, sina * x + cosa * y
class sample(object):
    """Randomly sample points at the given probability.
    """
    def __init__(self, scan, probability=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param probability: Sampling probability in interval [0,1] (default=1)
        :type probability: float
        :raises ValueError: if probability is outside [0, 1]
        """
        if not 0 <= probability <= 1:
            raise ValueError("Sampling probability must be in range [0,1]")
        self.scan = scan
        self.probability = probability
    def __iter__(self):
        return self
    def next(self):
        """Return the next point that survives the random sampling."""
        point = next(self.scan)
        # probability == 1 keeps everything and draws no random numbers.
        if self.probability == 1:
            return point
        # Draw one uniform variate per candidate; keep the first accepted.
        while random.random() > self.probability:
            point = next(self.scan)
        return point
class scale(object):
    """Scale coordinates by given factor
    """
    def __init__(self, scan, sx=1, sy=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param sx: x-coordinate scale factor (default=1)
        :type sx: float
        :param sy: y-coordinate scale factor (default=1)
        :type sy: float
        :raises ValueError: if either scale factor is not positive
        """
        for factor, message in ((sx, "X-scale must be positive"),
                                (sy, "Y-scale must be positive")):
            if factor <= 0:
                raise ValueError(message)
        self.scan = scan
        self.sx = sx
        self.sy = sy
    def __iter__(self):
        return self
    def next(self):
        """Return the next point with each coordinate scaled."""
        x, y = next(self.scan)
        return self.sx * x, self.sy * y
class skip(object):
    """Skip points at the given step size
    Yields every ``step``-th point of the wrapped scan whose 0-based index
    lies in [start, stop].
    NOTE(review): the default ``stop`` uses ``sys.maxint``, which exists
    only on Python 2.
    """
    def __init__(self, scan, start=0, stop=sys.maxint, step=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param start: Iteration starting 0-based index (default = 0)
        :type start: int
        :param stop: Iteration stopping 0-based index (default = sys.maxint)
        :type stop: int
        :param step: Iteration step size (default = 1)
        :type step: int
        :raises ValueError: if start/stop are negative, stop < start, or
            step is not positive
        """
        if start < 0: raise ValueError("Start must be non-negative")
        if stop < 0: raise ValueError("Stop must be non-negative")
        if stop < start: raise ValueError("Stop must be greater than start")
        if step <= 0: raise ValueError("Step must be positive")
        self.scan = scan
        self.start = start
        self.stop = stop
        self.step = step
        # Index of the most recently consumed point (-1 = none consumed yet).
        self.index = -1
    def __iter__(self):
        return self
    def next(self):
        """Next point in iteration
        :raises StopIteration: when the stop index is passed or the wrapped
            scan is exhausted
        """
        while True:
            x, y = next(self.scan)
            self.index += 1
            # Consume (but do not yield) points before the start index.
            if (self.index < self.start): continue
            if (self.index > self.stop): raise StopIteration("skip stopping")
            # Keep only every step-th point, counted from start.
            if ((self.index-self.start) % self.step != 0): continue
            return x, y
class snap(object):
    """Snap x and y coordinates to a grid point
    """
    def __init__(self, scan):
        """
        :param scan: Pixel scan generator
        :type scan: function
        """
        self.scan = scan
    def __iter__(self):
        return self
    def next(self):
        """Return the next point rounded to the nearest integer grid cell."""
        x, y = next(self.scan)
        return int(round(x)), int(round(y))
class swap(object):
    """Swap x and y coordinates
    """
    def __init__(self, scan):
        """
        :param scan: Pixel scan generator
        :type scan: function
        """
        self.scan = scan
    def __iter__(self):
        return self
    def next(self):
        """Return the next point with its coordinates transposed."""
        point = next(self.scan)
        return point[1], point[0]
class translation(object):
    """Translate coordinates by given offset
    """
    def __init__(self, scan, tx=0, ty=0):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param tx: x-coordinate translation offset (default = 0)
        :type tx: float
        :param ty: y-coordinate translation offset (default = 0)
        :type ty: float
        """
        self.scan = scan
        self.tx = tx
        self.ty = ty
    def __iter__(self):
        return self
    def next(self):
        """Return the next point shifted by the (tx, ty) offset."""
        x, y = next(self.scan)
        return x + self.tx, y + self.ty
# ======================================================================
# Scan patterns
# ----------------------------------------------------------------------
def circlescan(x0, y0, r1, r2):
    """Scan pixels in a circle pattern around a center point
    Radii are scanned from r1 to r2 (outward or inward); each radius is
    traced with a midpoint-circle octant walk rotated into all 8 octants.
    NOTE(review): r1/r2 are fed to range(), so they must be integers even
    though the docstring says float — confirm the intended types.
    :param x0: Center x-coordinate
    :type x0: float
    :param y0: Center y-coordinate
    :type y0: float
    :param r1: Initial radius
    :type r1: float
    :param r2: Final radius
    :type r2: float
    :returns: Coordinate generator
    :rtype: function
    """
    # Validate inputs
    if r1 < 0: raise ValueError("Initial radius must be non-negative")
    if r2 < 0: raise ValueError("Final radius must be non-negative")
    # List of pixels visited in previous diameter
    previous = []
    # Scan distances outward (1) or inward (-1)
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        if distance == 0:
            yield x0, y0
        else:
            # Computes points for first octant and the rotate by multiples of
            # 45 degrees to compute the other octants
            a = 0.707107  # ~ cos(45 deg) = sin(45 deg)
            rotations = {0: [[ 1, 0], [ 0, 1]],
                         1: [[ a, a], [-a, a]],
                         2: [[ 0, 1], [-1, 0]],
                         3: [[-a, a], [-a,-a]],
                         4: [[-1, 0], [ 0,-1]],
                         5: [[-a,-a], [ a,-a]],
                         6: [[ 0,-1], [ 1, 0]],
                         7: [[ a,-a], [ a, a]]}
            nangles = len(rotations)
            # List of pixels visited in current diameter
            current = []
            for angle in range(nangles):
                # Walk one octant (0 <= x < y) starting at the circle top.
                x = 0
                y = distance
                d = 1 - distance
                while x < y:
                    # Rotate the octant point by the current 45-degree
                    # multiple, then translate to the center.
                    xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y
                    yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y
                    xr = x0 + xr
                    yr = y0 + yr
                    # First check if point was in previous diameter
                    # since our scan pattern can lead to duplicates in
                    # neighboring diameters
                    point = (int(round(xr)), int(round(yr)))
                    if point not in previous:
                        yield xr, yr
                        current.append(point)
                    # Move pixel according to circle constraint
                    # (midpoint-circle decision-variable update)
                    if (d < 0):
                        d += 3 + 2 * x
                    else:
                        d += 5 - 2 * (y-x)
                        y -= 1
                    x += 1
            previous = current
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
    """Scan pixels in a grid pattern along the x-coordinate then y-coordinate
    :param xi: Initial x-coordinate
    :type xi: int
    :param yi: Initial y-coordinate
    :type yi: int
    :param xf: Final x-coordinate
    :type xf: int
    :param yf: Final y-coordinate
    :type yf: int
    :param stepx: Step size in x-coordinate
    :type stepx: int
    :param stepy: Step size in y-coordinate
    :type stepy: int
    :returns: Coordinate generator
    :rtype: function
    """
    if stepx <= 0: raise ValueError("X-step must be positive")
    if stepy <= 0: raise ValueError("Y-step must be positive")
    # Signed steps point from the initial toward the final coordinate.
    dx = stepx if xf >= xi else -stepx
    dy = stepy if yf >= yi else -stepy
    # Row-major sweep: x varies fastest, both endpoints inclusive.
    for row in range(yi, yf + dy, dy):
        for col in range(xi, xf + dx, dx):
            yield col, row
def hilbertscan(size, distance):
    """Scan pixels in a Hilbert curve pattern in the first quadrant. Modified
    algorithm from https://en.wikipedia.org/wiki/Hilbert_curve.
    :param size: Size of enclosing square (rounded up to a power of two)
    :type size: int
    :param distance: Distance along curve (Must be smaller than size**2 - 1)
    :type distance: int
    :returns: Coordinate generator (empty when distance is out of range)
    :rtype: function
    """
    # Round the square side up to a power of two so the bitwise curve
    # construction below is well defined.
    size = 2*(1<<(size-1).bit_length());
    # Invalid distance ends the iteration immediately.  The original code
    # raised StopIteration here, which under PEP 479 (Python 3.7+) would
    # surface as RuntimeError; 'return' preserves the observable
    # empty-iterator behavior on both Python 2 and 3.
    if (distance > size**2 - 1): return
    for d in range(distance):
        t = d
        x = 0
        y = 0
        s = 1
        while (s < size):
            # Floor division keeps the bit extraction working on Python 3
            # too (plain '/' would produce floats and break the '&').
            rx = 1 & (t // 2)
            ry = 1 & (t ^ rx)
            # Rotate/flip the sub-quadrant, then offset into it.
            x, y = hilbertrot(s, x, y, rx, ry)
            x += s * rx
            y += s * ry
            t //= 4
            s *= 2
        yield x, y
def ringscan(x0, y0, r1, r2, metric=chebyshev):
    """Scan pixels in a ring pattern around a center point clockwise
    Each ring starts at (x0, y0 + distance) and is walked using the
    8-connected step table below; a candidate step is accepted only while
    it stays at the ring's distance under the given metric.
    :param x0: Center x-coordinate
    :type x0: int
    :param y0: Center y-coordinate
    :type y0: int
    :param r1: Initial radius
    :type r1: int
    :param r2: Final radius
    :type r2: int
    :param metric: Distance metric
    :type metric: function
    :returns: Coordinate generator
    :rtype: function
    """
    # Validate inputs
    if r1 < 0: raise ValueError("Initial radius must be non-negative")
    if r2 < 0: raise ValueError("Final radius must be non-negative")
    if not hasattr(metric, "__call__"): raise TypeError("Metric not callable")
    # Define clockwise step directions (8-connected neighborhood).
    # 'direction' persists across rings so each ring resumes from the
    # last successful heading.
    direction = 0
    steps = {0: [ 1, 0],
             1: [ 1,-1],
             2: [ 0,-1],
             3: [-1,-1],
             4: [-1, 0],
             5: [-1, 1],
             6: [ 0, 1],
             7: [ 1, 1]}
    nsteps = len(steps)
    center = [x0, y0]
    # Scan distances outward (1) or inward (-1)
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        initial = [x0, y0 + distance]
        current = initial
        # Number of tries to find a valid neighbor
        ntrys = 0
        while True:
            # Short-circuit special case
            if distance == 0:
                yield current[0], current[1]
                break
            # Try and take a step and check if still within distance
            nextpoint = [current[i] + steps[direction][i] for i in range(2)]
            if metric(center, nextpoint) != distance:
                # Check if we tried all step directions and failed
                ntrys += 1
                if ntrys == nsteps:
                    break
                # Try the next direction
                direction = (direction + 1) % nsteps
                continue
            ntrys = 0
            yield current[0], current[1]
            # Check if we have come all the way around
            current = nextpoint
            if current == initial:
                break
            # Check if we tried all step directions and failed
            if ntrys == nsteps:
                break
def snakescan(xi, yi, xf, yf):
    """Scan pixels in a snake pattern along the x-coordinate then y-coordinate
    :param xi: Initial x-coordinate
    :type xi: int
    :param yi: Initial y-coordinate
    :type yi: int
    :param xf: Final x-coordinate
    :type xf: int
    :param yf: Final y-coordinate
    :type yf: int
    :returns: Coordinate generator
    :rtype: function
    """
    # Signed unit steps toward the final coordinates.
    dx = 1 if xf >= xi else -1
    dy = 1 if yf >= yi else -1
    # Sweep each row along x, reversing the x direction at the end of a
    # row so consecutive rows are traversed in opposite directions.
    x, xa, xb = xi, xi, xf
    for y in range(yi, yf + dy, dy):
        for x in range(xa, xb + dx, dx):
            yield x, y
        # Flip the x sweep when the row ended on a boundary column.
        if x in (xa, xb):
            dx = -dx
            xa, xb = xb, xa
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
"""Scan pixels in a random walk pattern with given step probabilities. The
random walk will continue indefinitely unless a skip transformation is used
with the 'stop' parameter set or a clip transformation is used with the
'abort' parameter set to True. The probabilities are normalized to sum to 1.
:param x0: Initial x-coordinate
:type x0: int
:param y0: Initial y-coordinate
:type y0: int
:param xn: Probability of moving in the negative x direction
:type xn: float
:param xp: Probability of moving in the positive x direction
:type xp: float
:param yn: Probability of moving in the negative y direction
:type yn: float
:param yp: Probability of moving in the positive y direction
:type yp: float
"""
# Validate inputs
if xn < 0: raise ValueError("Negative x probabilty must be non-negative")
if xp < 0: raise ValueError("Positive x probabilty must be non-negative")
if yn < 0: raise ValueError("Negative y probabilty must be non-negative")
if yp < 0: raise ValueError("Positive y probabilty must be non-negative")
# Compute normalized probability
total = xp + xn + yp + yn
xn /= total
xp /= total
yn /= total
yp /= total
# Compute cumulative probability
cxn = xn
cxp = cxn + xp
cyn = cxp + yn
# Initialize position
x, y = x0, y0
while True:
yield x, y
# Take random step
probability = random.random()
if probability <= cxn:
x -= 1
elif probability <= cxp:
x += 1
elif probability <= cyn:
y -= 1
else:
y += 1
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
snakescan
|
python
|
def snakescan(xi, yi, xf, yf):
# Determine direction to move
dx = 1 if xf >= xi else -1
dy = 1 if yf >= yi else -1
# Scan pixels first along x-coordinate then y-coordinate and flip
# x-direction when the end of the line is reached
x, xa, xb = xi, xi, xf
for y in range(yi, yf + dy, dy):
for x in range(xa, xb + dx, dx):
yield x, y
# Swap x-direction
if x == xa or x == xb:
dx *= -1
xa, xb = xb, xa
|
Scan pixels in a snake pattern along the x-coordinate then y-coordinate
:param xi: Initial x-coordinate
:type xi: int
:param yi: Initial y-coordinate
:type yi: int
:param xf: Final x-coordinate
:type xf: int
:param yf: Final y-coordinate
:type yf: int
:returns: Coordinate generator
:rtype: function
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L606-L635
| null |
#!/usr/bin/python
# AUTHOR
# Daniel Pulido <dpmcmlxxvi@gmail.com>
# COPYRIGHT
# Copyright (c) 2015 Daniel Pulido <dpmcmlxxvi@gmail.com>
# LICENSE
# MIT License (http://opensource.org/licenses/MIT)
"""
Various patterns to scan pixels on a grid. Rectangular patterns are scanned
first along the x-coordinate then the y-coordinate. Radial patterns are
scanned clockwise. Transformation filters are available to apply
standard transformations (e.g., rotation, scale, translation) on the
coordinates.
"""
import math
import random
import sys
# ======================================================================
# Distance metrics
# ----------------------------------------------------------------------
def chebyshev(point1, point2):
"""Computes distance between 2D points using chebyshev metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return max(abs(point1[0] - point2[0]), abs(point1[1] - point2[1]))
def manhattan(point1, point2):
"""Computes distance between 2D points using manhattan metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return abs(point1[0] - point2[0]) + abs(point1[1] - point2[1])
def hilbertrot(n, x, y, rx, ry):
"""Rotates and flips a quadrant appropriately for the Hilbert scan
generator. See https://en.wikipedia.org/wiki/Hilbert_curve.
"""
if ry == 0:
if rx == 1:
x = n - 1 - x
y = n - 1 - y
return y, x
return x, y
# ======================================================================
# Scan transformations
# ----------------------------------------------------------------------
class clip(object):
"""Clip coordinates that exceed boundary
"""
def __init__(self,
scan,
minx=-sys.maxint,
maxx=sys.maxint,
miny=-sys.maxint,
maxy=sys.maxint,
predicate=None,
abort=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param minx: Minimum x-coordinate (default = -sys.maxint)
:type minx: int
:param maxx: Maximum x-coordinate (default = sys.maxint)
:type maxx: int
:param miny: Minimum y-coordinate (default = -sys.maxint)
:type miny: int
:param maxy: Maximum y-coordinate (default = sys.maxint)
:type maxy: int
:param predicate: Optional function that takes 2 arguments (x and y)
and returns true if coordinate should be kept
otherwise false (default = None)
:type predicate: function
:param abort: Abort iteration if boundary is crossed
:type abort: bool
"""
self.scan = scan
self.minx = minx
self.maxx = maxx
self.miny = miny
self.maxy = maxy
self.predicate = predicate
self.abort = abort
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
if self.predicate is not None and not self.predicate(x,y):
if self.abort: raise StopIteration("Boundary crossed!")
elif (x < self.minx or
x > self.maxx or
y < self.miny or
y > self.maxy):
if self.abort: raise StopIteration("Boundary crossed!")
else:
return x, y
class reflection(object):
"""Reflect coordinates about x and y axes
"""
def __init__(self, scan, rx=False, ry=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param rx: True if x-coordinate should be reflected (default=False)
:type rx: bool
:param ry: True if y-coordinate should be reflected (default=False)
:type ry: bool
"""
self.scan = scan
self.rx = rx
self.ry = ry
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = -x if self.rx else x
yr = -y if self.ry else y
return xr, yr
class reservoir(object):
def __init__(self, scan, npoints):
"""Randomly sample points using the reservoir sampling method. This is
only useful if you need exactly 'npoints' sampled. Otherwise use the
'sample' transformation to randomly sample at a given rate. This method
requires storing 'npoints' in memory and precomputing the random
selection so it may be slower than 'sample'.
:param scan: Pixel scan generator
:type scan: function
:param npoints: Sample size
:type npoints: int
"""
# Validate inputs
if npoints <= 0: raise ValueError("Sample size must be positive")
self.reservoir = []
self.count = 0
# Populate reservoir
for index, point in enumerate(scan):
if index < npoints:
self.reservoir.append(point)
else:
j = random.randint(0, index)
if j < npoints:
self.reservoir[j] = point
# Shuffle the reservoir in case population was small and the
# points were not sufficiently randomized
random.shuffle(self.reservoir)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.count < len(self.reservoir):
self.count += 1
return self.reservoir[self.count-1]
raise StopIteration("Reservoir exhausted")
class rotation(object):
"""Rotate coordinates by given angle. If the final transformation axes do
not align with the x and y axes then it may yield duplicate coordinates
during scanning.
"""
def __init__(self, scan, angle=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param angle: Counter-clockwise angle in degrees (default=0)
:type angle: float
"""
self.scan = scan
self.angle = angle * (math.pi / 180.0)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
ca, sa = math.cos(self.angle), math.sin(self.angle)
xr = ca * x - sa * y
yr = sa * x + ca * y
return xr, yr
class sample(object):
"""Randomly sample points at the given probability.
"""
def __init__(self, scan, probability=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param probability: Sampling probability in interval [0,1] (default=1)
:type probability: float
"""
if probability < 0 or probability > 1:
raise ValueError("Sampling probability must be in range [0,1]")
self.scan = scan
self.probability = probability
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.probability == 1:
x, y = next(self.scan)
else:
while True:
x, y = next(self.scan)
if random.random() <= self.probability: break
return x, y
class scale(object):
"""Scale coordinates by given factor
"""
def __init__(self, scan, sx=1, sy=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate scale factor (default=1)
:type sx: float
:param sy: y-coordinate scale factor (default=1)
:type sy: float
"""
if sx <= 0: raise ValueError("X-scale must be positive")
if sy <= 0: raise ValueError("Y-scale must be positive")
self.scan = scan
self.sx = sx
self.sy = sy
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = self.sx * x
yr = self.sy * y
return xr, yr
class skip(object):
"""Skip points at the given step size
"""
def __init__(self, scan, start=0, stop=sys.maxint, step=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param start: Iteration starting 0-based index (default = 0)
:type start: int
:param stop: Iteration stopping 0-based index (default = sys.maxint)
:type stop: int
:param step: Iteration step size (default = 1)
:type step: int
"""
if start < 0: raise ValueError("Start must be non-negative")
if stop < 0: raise ValueError("Stop must be non-negative")
if stop < start: raise ValueError("Stop must be greater than start")
if step <= 0: raise ValueError("Step must be positive")
self.scan = scan
self.start = start
self.stop = stop
self.step = step
self.index = -1
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
self.index += 1
if (self.index < self.start): continue
if (self.index > self.stop): raise StopIteration("skip stopping")
if ((self.index-self.start) % self.step != 0): continue
return x, y
class snap(object):
"""Snap x and y coordinates to a grid point
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xs = int(round(x))
ys = int(round(y))
return xs, ys
class swap(object):
"""Swap x and y coordinates
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
return y, x
class translation(object):
"""Translate coordinates by given offset
"""
def __init__(self, scan, tx=0, ty=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate translation offset (default = 0)
:type sx: float
:param sy: y-coordinate translaation offset (default = 0)
:type sy: float
"""
self.scan = scan
self.tx = tx
self.ty = ty
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = x + self.tx
yr = y + self.ty
return xr, yr
# ======================================================================
# Scan patterns
# ----------------------------------------------------------------------
def circlescan(x0, y0, r1, r2):
"""Scan pixels in a circle pattern around a center point
:param x0: Center x-coordinate
:type x0: float
:param y0: Center y-coordinate
:type y0: float
:param r1: Initial radius
:type r1: float
:param r2: Final radius
:type r2: float
:returns: Coordinate generator
:rtype: function
"""
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
# List of pixels visited in previous diameter
previous = []
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
if distance == 0:
yield x0, y0
else:
# Computes points for first octant and the rotate by multiples of
# 45 degrees to compute the other octants
a = 0.707107
rotations = {0: [[ 1, 0], [ 0, 1]],
1: [[ a, a], [-a, a]],
2: [[ 0, 1], [-1, 0]],
3: [[-a, a], [-a,-a]],
4: [[-1, 0], [ 0,-1]],
5: [[-a,-a], [ a,-a]],
6: [[ 0,-1], [ 1, 0]],
7: [[ a,-a], [ a, a]]}
nangles = len(rotations)
# List of pixels visited in current diameter
current = []
for angle in range(nangles):
x = 0
y = distance
d = 1 - distance
while x < y:
xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y
yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y
xr = x0 + xr
yr = y0 + yr
# First check if point was in previous diameter
# since our scan pattern can lead to duplicates in
# neighboring diameters
point = (int(round(xr)), int(round(yr)))
if point not in previous:
yield xr, yr
current.append(point)
# Move pixel according to circle constraint
if (d < 0):
d += 3 + 2 * x
else:
d += 5 - 2 * (y-x)
y -= 1
x += 1
previous = current
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
"""Scan pixels in a grid pattern along the x-coordinate then y-coordinate
:param xi: Initial x-coordinate
:type xi: int
:param yi: Initial y-coordinate
:type yi: int
:param xf: Final x-coordinate
:type xf: int
:param yf: Final y-coordinate
:type yf: int
:param stepx: Step size in x-coordinate
:type stepx: int
:param stepy: Step size in y-coordinate
:type stepy: int
:returns: Coordinate generator
:rtype: function
"""
if stepx <= 0: raise ValueError("X-step must be positive")
if stepy <= 0: raise ValueError("Y-step must be positive")
# Determine direction to move
dx = stepx if xf >= xi else -stepx
dy = stepy if yf >= yi else -stepy
for y in range(yi, yf + dy, dy):
for x in range(xi, xf + dx, dx):
yield x, y
def hilbertscan(size, distance):
"""Scan pixels in a Hilbert curve pattern in the first quadrant. Modified
algorithm from https://en.wikipedia.org/wiki/Hilbert_curve.
:param size: Size of enclosing square
:type size: int
:param distance: Distance along curve (Must be smaller than size**2 - 1)
:type distance: int
:returns: Coordinate generator
:rtype: function
"""
size = 2*(1<<(size-1).bit_length());
if (distance > size**2 - 1): raise StopIteration("Invalid distance!")
for d in range(distance):
t = d
x = 0
y = 0
s = 1
while (s < size):
rx = 1 & (t / 2)
ry = 1 & (t ^ rx)
x, y = hilbertrot(s, x, y, rx, ry)
x += s * rx
y += s * ry
t /= 4
s *= 2
yield x, y
def ringscan(x0, y0, r1, r2, metric=chebyshev):
"""Scan pixels in a ring pattern around a center point clockwise
:param x0: Center x-coordinate
:type x0: int
:param y0: Center y-coordinate
:type y0: int
:param r1: Initial radius
:type r1: int
:param r2: Final radius
:type r2: int
:param metric: Distance metric
:type metric: function
:returns: Coordinate generator
:rtype: function
"""
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
if not hasattr(metric, "__call__"): raise TypeError("Metric not callable")
# Define clockwise step directions
direction = 0
steps = {0: [ 1, 0],
1: [ 1,-1],
2: [ 0,-1],
3: [-1,-1],
4: [-1, 0],
5: [-1, 1],
6: [ 0, 1],
7: [ 1, 1]}
nsteps = len(steps)
center = [x0, y0]
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
initial = [x0, y0 + distance]
current = initial
# Number of tries to find a valid neighrbor
ntrys = 0
while True:
# Short-circuit special case
if distance == 0:
yield current[0], current[1]
break
# Try and take a step and check if still within distance
nextpoint = [current[i] + steps[direction][i] for i in range(2)]
if metric(center, nextpoint) != distance:
# Check if we tried all step directions and failed
ntrys += 1
if ntrys == nsteps:
break
# Try the next direction
direction = (direction + 1) % nsteps
continue
ntrys = 0
yield current[0], current[1]
# Check if we have come all the way around
current = nextpoint
if current == initial:
break
# Check if we tried all step directions and failed
if ntrys == nsteps:
break
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
"""Scan pixels in a random walk pattern with given step probabilities. The
random walk will continue indefinitely unless a skip transformation is used
with the 'stop' parameter set or a clip transformation is used with the
'abort' parameter set to True. The probabilities are normalized to sum to 1.
:param x0: Initial x-coordinate
:type x0: int
:param y0: Initial y-coordinate
:type y0: int
:param xn: Probability of moving in the negative x direction
:type xn: float
:param xp: Probability of moving in the positive x direction
:type xp: float
:param yn: Probability of moving in the negative y direction
:type yn: float
:param yp: Probability of moving in the positive y direction
:type yp: float
"""
# Validate inputs
if xn < 0: raise ValueError("Negative x probabilty must be non-negative")
if xp < 0: raise ValueError("Positive x probabilty must be non-negative")
if yn < 0: raise ValueError("Negative y probabilty must be non-negative")
if yp < 0: raise ValueError("Positive y probabilty must be non-negative")
# Compute normalized probability
total = xp + xn + yp + yn
xn /= total
xp /= total
yn /= total
yp /= total
# Compute cumulative probability
cxn = xn
cxp = cxn + xp
cyn = cxp + yn
# Initialize position
x, y = x0, y0
while True:
yield x, y
# Take random step
probability = random.random()
if probability <= cxn:
x -= 1
elif probability <= cxp:
x += 1
elif probability <= cyn:
y -= 1
else:
y += 1
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
walkscan
|
python
|
def walkscan(x0, y0, xn=0.25, xp=0.25, yn=0.25, yp=0.25):
# Validate inputs
if xn < 0: raise ValueError("Negative x probabilty must be non-negative")
if xp < 0: raise ValueError("Positive x probabilty must be non-negative")
if yn < 0: raise ValueError("Negative y probabilty must be non-negative")
if yp < 0: raise ValueError("Positive y probabilty must be non-negative")
# Compute normalized probability
total = xp + xn + yp + yn
xn /= total
xp /= total
yn /= total
yp /= total
# Compute cumulative probability
cxn = xn
cxp = cxn + xp
cyn = cxp + yn
# Initialize position
x, y = x0, y0
while True:
yield x, y
# Take random step
probability = random.random()
if probability <= cxn:
x -= 1
elif probability <= cxp:
x += 1
elif probability <= cyn:
y -= 1
else:
y += 1
|
Scan pixels in a random walk pattern with given step probabilities. The
random walk will continue indefinitely unless a skip transformation is used
with the 'stop' parameter set or a clip transformation is used with the
'abort' parameter set to True. The probabilities are normalized to sum to 1.
:param x0: Initial x-coordinate
:type x0: int
:param y0: Initial y-coordinate
:type y0: int
:param xn: Probability of moving in the negative x direction
:type xn: float
:param xp: Probability of moving in the positive x direction
:type xp: float
:param yn: Probability of moving in the negative y direction
:type yn: float
:param yp: Probability of moving in the positive y direction
:type yp: float
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L637-L691
| null |
#!/usr/bin/python
# AUTHOR
# Daniel Pulido <dpmcmlxxvi@gmail.com>
# COPYRIGHT
# Copyright (c) 2015 Daniel Pulido <dpmcmlxxvi@gmail.com>
# LICENSE
# MIT License (http://opensource.org/licenses/MIT)
"""
Various patterns to scan pixels on a grid. Rectangular patterns are scanned
first along the x-coordinate then the y-coordinate. Radial patterns are
scanned clockwise. Transformation filters are available to apply
standard transformations (e.g., rotation, scale, translation) on the
coordinates.
"""
import math
import random
import sys
# ======================================================================
# Distance metrics
# ----------------------------------------------------------------------
def chebyshev(point1, point2):
"""Computes distance between 2D points using chebyshev metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return max(abs(point1[0] - point2[0]), abs(point1[1] - point2[1]))
def manhattan(point1, point2):
"""Computes distance between 2D points using manhattan metric
:param point1: 1st point
:type point1: list
:param point2: 2nd point
:type point2: list
:returns: Distance between point1 and point2
:rtype: float
"""
return abs(point1[0] - point2[0]) + abs(point1[1] - point2[1])
def hilbertrot(n, x, y, rx, ry):
"""Rotates and flips a quadrant appropriately for the Hilbert scan
generator. See https://en.wikipedia.org/wiki/Hilbert_curve.
"""
if ry == 0:
if rx == 1:
x = n - 1 - x
y = n - 1 - y
return y, x
return x, y
# ======================================================================
# Scan transformations
# ----------------------------------------------------------------------
class clip(object):
"""Clip coordinates that exceed boundary
"""
def __init__(self,
scan,
minx=-sys.maxint,
maxx=sys.maxint,
miny=-sys.maxint,
maxy=sys.maxint,
predicate=None,
abort=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param minx: Minimum x-coordinate (default = -sys.maxint)
:type minx: int
:param maxx: Maximum x-coordinate (default = sys.maxint)
:type maxx: int
:param miny: Minimum y-coordinate (default = -sys.maxint)
:type miny: int
:param maxy: Maximum y-coordinate (default = sys.maxint)
:type maxy: int
:param predicate: Optional function that takes 2 arguments (x and y)
and returns true if coordinate should be kept
otherwise false (default = None)
:type predicate: function
:param abort: Abort iteration if boundary is crossed
:type abort: bool
"""
self.scan = scan
self.minx = minx
self.maxx = maxx
self.miny = miny
self.maxy = maxy
self.predicate = predicate
self.abort = abort
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
if self.predicate is not None and not self.predicate(x,y):
if self.abort: raise StopIteration("Boundary crossed!")
elif (x < self.minx or
x > self.maxx or
y < self.miny or
y > self.maxy):
if self.abort: raise StopIteration("Boundary crossed!")
else:
return x, y
class reflection(object):
"""Reflect coordinates about x and y axes
"""
def __init__(self, scan, rx=False, ry=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param rx: True if x-coordinate should be reflected (default=False)
:type rx: bool
:param ry: True if y-coordinate should be reflected (default=False)
:type ry: bool
"""
self.scan = scan
self.rx = rx
self.ry = ry
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = -x if self.rx else x
yr = -y if self.ry else y
return xr, yr
class reservoir(object):
def __init__(self, scan, npoints):
"""Randomly sample points using the reservoir sampling method. This is
only useful if you need exactly 'npoints' sampled. Otherwise use the
'sample' transformation to randomly sample at a given rate. This method
requires storing 'npoints' in memory and precomputing the random
selection so it may be slower than 'sample'.
:param scan: Pixel scan generator
:type scan: function
:param npoints: Sample size
:type npoints: int
"""
# Validate inputs
if npoints <= 0: raise ValueError("Sample size must be positive")
self.reservoir = []
self.count = 0
# Populate reservoir
for index, point in enumerate(scan):
if index < npoints:
self.reservoir.append(point)
else:
j = random.randint(0, index)
if j < npoints:
self.reservoir[j] = point
# Shuffle the reservoir in case population was small and the
# points were not sufficiently randomized
random.shuffle(self.reservoir)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.count < len(self.reservoir):
self.count += 1
return self.reservoir[self.count-1]
raise StopIteration("Reservoir exhausted")
class rotation(object):
"""Rotate coordinates by given angle. If the final transformation axes do
not align with the x and y axes then it may yield duplicate coordinates
during scanning.
"""
def __init__(self, scan, angle=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param angle: Counter-clockwise angle in degrees (default=0)
:type angle: float
"""
self.scan = scan
self.angle = angle * (math.pi / 180.0)
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
ca, sa = math.cos(self.angle), math.sin(self.angle)
xr = ca * x - sa * y
yr = sa * x + ca * y
return xr, yr
class sample(object):
"""Randomly sample points at the given probability.
"""
def __init__(self, scan, probability=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param probability: Sampling probability in interval [0,1] (default=1)
:type probability: float
"""
if probability < 0 or probability > 1:
raise ValueError("Sampling probability must be in range [0,1]")
self.scan = scan
self.probability = probability
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
if self.probability == 1:
x, y = next(self.scan)
else:
while True:
x, y = next(self.scan)
if random.random() <= self.probability: break
return x, y
class scale(object):
"""Scale coordinates by given factor
"""
def __init__(self, scan, sx=1, sy=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate scale factor (default=1)
:type sx: float
:param sy: y-coordinate scale factor (default=1)
:type sy: float
"""
if sx <= 0: raise ValueError("X-scale must be positive")
if sy <= 0: raise ValueError("Y-scale must be positive")
self.scan = scan
self.sx = sx
self.sy = sy
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = self.sx * x
yr = self.sy * y
return xr, yr
class skip(object):
"""Skip points at the given step size
"""
def __init__(self, scan, start=0, stop=sys.maxint, step=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param start: Iteration starting 0-based index (default = 0)
:type start: int
:param stop: Iteration stopping 0-based index (default = sys.maxint)
:type stop: int
:param step: Iteration step size (default = 1)
:type step: int
"""
if start < 0: raise ValueError("Start must be non-negative")
if stop < 0: raise ValueError("Stop must be non-negative")
if stop < start: raise ValueError("Stop must be greater than start")
if step <= 0: raise ValueError("Step must be positive")
self.scan = scan
self.start = start
self.stop = stop
self.step = step
self.index = -1
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
while True:
x, y = next(self.scan)
self.index += 1
if (self.index < self.start): continue
if (self.index > self.stop): raise StopIteration("skip stopping")
if ((self.index-self.start) % self.step != 0): continue
return x, y
class snap(object):
"""Snap x and y coordinates to a grid point
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xs = int(round(x))
ys = int(round(y))
return xs, ys
class swap(object):
"""Swap x and y coordinates
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
return y, x
class translation(object):
"""Translate coordinates by given offset
"""
def __init__(self, scan, tx=0, ty=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate translation offset (default = 0)
:type sx: float
:param sy: y-coordinate translaation offset (default = 0)
:type sy: float
"""
self.scan = scan
self.tx = tx
self.ty = ty
def __iter__(self):
return self
def next(self):
"""Next point in iteration
"""
x, y = next(self.scan)
xr = x + self.tx
yr = y + self.ty
return xr, yr
# ======================================================================
# Scan patterns
# ----------------------------------------------------------------------
def circlescan(x0, y0, r1, r2):
"""Scan pixels in a circle pattern around a center point
:param x0: Center x-coordinate
:type x0: float
:param y0: Center y-coordinate
:type y0: float
:param r1: Initial radius
:type r1: float
:param r2: Final radius
:type r2: float
:returns: Coordinate generator
:rtype: function
"""
# Validate inputs
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
# List of pixels visited in previous diameter
previous = []
# Scan distances outward (1) or inward (-1)
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
if distance == 0:
yield x0, y0
else:
# Computes points for first octant and the rotate by multiples of
# 45 degrees to compute the other octants
a = 0.707107
rotations = {0: [[ 1, 0], [ 0, 1]],
1: [[ a, a], [-a, a]],
2: [[ 0, 1], [-1, 0]],
3: [[-a, a], [-a,-a]],
4: [[-1, 0], [ 0,-1]],
5: [[-a,-a], [ a,-a]],
6: [[ 0,-1], [ 1, 0]],
7: [[ a,-a], [ a, a]]}
nangles = len(rotations)
# List of pixels visited in current diameter
current = []
for angle in range(nangles):
x = 0
y = distance
d = 1 - distance
while x < y:
xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y
yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y
xr = x0 + xr
yr = y0 + yr
# First check if point was in previous diameter
# since our scan pattern can lead to duplicates in
# neighboring diameters
point = (int(round(xr)), int(round(yr)))
if point not in previous:
yield xr, yr
current.append(point)
# Move pixel according to circle constraint
if (d < 0):
d += 3 + 2 * x
else:
d += 5 - 2 * (y-x)
y -= 1
x += 1
previous = current
def gridscan(xi, yi, xf, yf, stepx=1, stepy=1):
"""Scan pixels in a grid pattern along the x-coordinate then y-coordinate
:param xi: Initial x-coordinate
:type xi: int
:param yi: Initial y-coordinate
:type yi: int
:param xf: Final x-coordinate
:type xf: int
:param yf: Final y-coordinate
:type yf: int
:param stepx: Step size in x-coordinate
:type stepx: int
:param stepy: Step size in y-coordinate
:type stepy: int
:returns: Coordinate generator
:rtype: function
"""
if stepx <= 0: raise ValueError("X-step must be positive")
if stepy <= 0: raise ValueError("Y-step must be positive")
# Determine direction to move
dx = stepx if xf >= xi else -stepx
dy = stepy if yf >= yi else -stepy
for y in range(yi, yf + dy, dy):
for x in range(xi, xf + dx, dx):
yield x, y
def hilbertscan(size, distance):
"""Scan pixels in a Hilbert curve pattern in the first quadrant. Modified
algorithm from https://en.wikipedia.org/wiki/Hilbert_curve.
:param size: Size of enclosing square
:type size: int
:param distance: Distance along curve (Must be smaller than size**2 - 1)
:type distance: int
:returns: Coordinate generator
:rtype: function
"""
size = 2*(1<<(size-1).bit_length());
if (distance > size**2 - 1): raise StopIteration("Invalid distance!")
for d in range(distance):
t = d
x = 0
y = 0
s = 1
while (s < size):
rx = 1 & (t / 2)
ry = 1 & (t ^ rx)
x, y = hilbertrot(s, x, y, rx, ry)
x += s * rx
y += s * ry
t /= 4
s *= 2
yield x, y
def ringscan(x0, y0, r1, r2, metric=chebyshev):
    """Scan pixels in a ring pattern around a center point clockwise.

    Rings are visited from radius r1 to r2 (outward or inward). Each
    ring starts at (x0, y0 + distance) and is traced by stepping through
    the 8 neighbor directions in clockwise order, keeping only steps
    that remain at the current ``metric`` distance from the center.

    :param x0: Center x-coordinate
    :type x0: int
    :param y0: Center y-coordinate
    :type y0: int
    :param r1: Initial radius
    :type r1: int
    :param r2: Final radius
    :type r2: int
    :param metric: Distance metric taking two [x, y] points
                   (default = chebyshev)
    :type metric: function
    :returns: Coordinate generator
    :rtype: function
    :raises ValueError: If either radius is negative
    :raises TypeError: If metric is not callable
    """
    # Validate inputs
    if r1 < 0: raise ValueError("Initial radius must be non-negative")
    if r2 < 0: raise ValueError("Final radius must be non-negative")
    if not hasattr(metric, "__call__"): raise TypeError("Metric not callable")
    # Define clockwise step directions (E, SE, S, SW, W, NW, N, NE).
    # NOTE(review): direction is intentionally not reset per ring — the
    # search direction carries over between radii.
    direction = 0
    steps = {0: [ 1, 0],
             1: [ 1,-1],
             2: [ 0,-1],
             3: [-1,-1],
             4: [-1, 0],
             5: [-1, 1],
             6: [ 0, 1],
             7: [ 1, 1]}
    nsteps = len(steps)
    center = [x0, y0]
    # Scan distances outward (1) or inward (-1)
    rstep = 1 if r2 >= r1 else -1
    for distance in range(r1, r2 + rstep, rstep):
        initial = [x0, y0 + distance]
        current = initial
        # Number of tries to find a valid neighbor
        ntrys = 0
        while True:
            # Short-circuit special case: radius 0 is just the center
            if distance == 0:
                yield current[0], current[1]
                break
            # Try to take a step and check if still at the ring distance
            nextpoint = [current[i] + steps[direction][i] for i in range(2)]
            if metric(center, nextpoint) != distance:
                # Check if we tried all step directions and failed
                ntrys += 1
                if ntrys == nsteps:
                    break
                # Try the next direction
                direction = (direction + 1) % nsteps
                continue
            ntrys = 0
            yield current[0], current[1]
            # Check if we have come all the way around
            current = nextpoint
            if current == initial:
                break
            # NOTE(review): ntrys was just reset to 0 above, so this guard
            # looks unreachable — confirm before removing.
            if ntrys == nsteps:
                break
def snakescan(xi, yi, xf, yf):
    """Scan pixels in a boustrophedon (snake) pattern.

    The first row is traced from xi toward xf; every following row
    reverses the horizontal direction, so consecutive points are always
    grid neighbors.

    :param xi: Initial x-coordinate
    :type xi: int
    :param yi: Initial y-coordinate
    :type yi: int
    :param xf: Final x-coordinate
    :type xf: int
    :param yf: Final y-coordinate
    :type yf: int
    :returns: Coordinate generator
    :rtype: function
    """
    # Signed unit steps toward the final coordinates
    step_x = 1 if xf >= xi else -1
    step_y = 1 if yf >= yi else -1
    start, stop = xi, xf
    for row in range(yi, yf + step_y, step_y):
        for col in range(start, stop + step_x, step_x):
            yield col, row
        # Reverse horizontal travel for the next row; the inner loop is
        # never empty, so this always fires exactly once per row.
        step_x = -step_x
        start, stop = stop, start
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
clip.next
|
python
|
def next(self):
    """Return the next in-bounds point from the wrapped scan.

    Points rejected by the predicate (when one is set) or lying outside
    the [minx, maxx] x [miny, maxy] box are skipped; if ``abort`` is
    true, the first rejected point terminates iteration instead.

    :returns: Next (x, y) coordinate
    :rtype: tuple
    :raises StopIteration: If abort is set and the boundary is crossed
    """
    while True:
        x, y = next(self.scan)
        # The predicate, when present, takes precedence over the box test
        if self.predicate is not None and not self.predicate(x,y):
            if self.abort: raise StopIteration("Boundary crossed!")
        elif (x < self.minx or
              x > self.maxx or
              y < self.miny or
              y > self.maxy):
            if self.abort: raise StopIteration("Boundary crossed!")
        else:
            return x, y
|
Next point in iteration
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L108-L121
| null |
class clip(object):
    """Clip coordinates that exceed a rectangular boundary or fail a
    user-supplied predicate.
    """
    def __init__(self,
                 scan,
                 minx=-sys.maxint,
                 maxx=sys.maxint,
                 miny=-sys.maxint,
                 maxy=sys.maxint,
                 predicate=None,
                 abort=False):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param minx: Minimum x-coordinate (default = -sys.maxint)
        :type minx: int
        :param maxx: Maximum x-coordinate (default = sys.maxint)
        :type maxx: int
        :param miny: Minimum y-coordinate (default = -sys.maxint)
        :type miny: int
        :param maxy: Maximum y-coordinate (default = sys.maxint)
        :type maxy: int
        :param predicate: Optional function that takes 2 arguments (x and y)
                          and returns true if coordinate should be kept
                          otherwise false (default = None)
        :type predicate: function
        :param abort: Abort iteration if boundary is crossed
        :type abort: bool
        """
        # NOTE(review): sys.maxint exists only on Python 2; Python 3 would
        # need sys.maxsize here — confirm target interpreter.
        self.scan = scan
        self.minx = minx
        self.maxx = maxx
        self.miny = miny
        self.maxy = maxy
        self.predicate = predicate
        self.abort = abort
    def __iter__(self):
        # The clip object is its own iterator
        return self
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
reflection.next
|
python
|
def next(self):
    """Return the next point with the configured axis reflections applied.

    :returns: Next (x, y) coordinate, negated per rx/ry
    :rtype: tuple
    """
    x, y = next(self.scan)
    xr = -x if self.rx else x
    yr = -y if self.ry else y
    return xr, yr
|
Next point in iteration
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L142-L148
| null |
class reflection(object):
"""Reflect coordinates about x and y axes
"""
def __init__(self, scan, rx=False, ry=False):
"""
:param scan: Pixel scan generator
:type scan: function
:param rx: True if x-coordinate should be reflected (default=False)
:type rx: bool
:param ry: True if y-coordinate should be reflected (default=False)
:type ry: bool
"""
self.scan = scan
self.rx = rx
self.ry = ry
def __iter__(self):
return self
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
reservoir.next
|
python
|
def next(self):
    """Return the next pre-sampled point from the reservoir.

    :returns: Next (x, y) coordinate
    :rtype: tuple
    :raises StopIteration: Once every sampled point has been returned
    """
    if self.count < len(self.reservoir):
        self.count += 1
        return self.reservoir[self.count-1]
    raise StopIteration("Reservoir exhausted")
|
Next point in iteration
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L186-L193
| null |
class reservoir(object):
    """Randomly sample a fixed number of scan points (reservoir sampling)."""
    def __init__(self, scan, npoints):
        """Randomly sample points using the reservoir sampling method. This is
        only useful if you need exactly 'npoints' sampled. Otherwise use the
        'sample' transformation to randomly sample at a given rate. This method
        requires storing 'npoints' in memory and precomputing the random
        selection so it may be slower than 'sample'.

        :param scan: Pixel scan generator
        :type scan: function
        :param npoints: Sample size
        :type npoints: int
        :raises ValueError: If npoints is not positive
        """
        # Validate inputs
        if npoints <= 0: raise ValueError("Sample size must be positive")
        self.reservoir = []   # the sampled points
        self.count = 0        # read position; presumably advanced by next()
        # Populate reservoir: keep the first npoints outright, then replace
        # a random existing entry with probability npoints/(index+1)
        for index, point in enumerate(scan):
            if index < npoints:
                self.reservoir.append(point)
            else:
                j = random.randint(0, index)
                if j < npoints:
                    self.reservoir[j] = point
        # Shuffle the reservoir in case population was small and the
        # points were not sufficiently randomized
        random.shuffle(self.reservoir)
    def __iter__(self):
        return self
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
rotation.next
|
python
|
def next(self):
    """Return the next point rotated counter-clockwise by self.angle
    (radians) about the origin.

    :returns: Next rotated (x, y) coordinate
    :rtype: tuple
    """
    x, y = next(self.scan)
    # Standard 2-D rotation matrix applied to (x, y)
    ca, sa = math.cos(self.angle), math.sin(self.angle)
    xr = ca * x - sa * y
    yr = sa * x + ca * y
    return xr, yr
|
Next point in iteration
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L214-L221
| null |
class rotation(object):
"""Rotate coordinates by given angle. If the final transformation axes do
not align with the x and y axes then it may yield duplicate coordinates
during scanning.
"""
def __init__(self, scan, angle=0):
"""
:param scan: Pixel scan generator
:type scan: function
:param angle: Counter-clockwise angle in degrees (default=0)
:type angle: float
"""
self.scan = scan
self.angle = angle * (math.pi / 180.0)
def __iter__(self):
return self
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
sample.next
|
python
|
def next(self):
    """Return the next point, discarding each candidate with probability
    1 - self.probability.

    :returns: Next (x, y) coordinate
    :rtype: tuple
    """
    if self.probability == 1:
        # Fast path: keep every point
        x, y = next(self.scan)
    else:
        while True:
            x, y = next(self.scan)
            if random.random() <= self.probability: break
    return x, y
|
Next point in iteration
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L241-L250
| null |
class sample(object):
"""Randomly sample points at the given probability.
"""
def __init__(self, scan, probability=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param probability: Sampling probability in interval [0,1] (default=1)
:type probability: float
"""
if probability < 0 or probability > 1:
raise ValueError("Sampling probability must be in range [0,1]")
self.scan = scan
self.probability = probability
def __iter__(self):
return self
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
scale.next
|
python
|
def next(self):
    """Return the next point with each coordinate multiplied by its
    axis scale factor.

    :returns: Next scaled (x, y) coordinate
    :rtype: tuple
    """
    x, y = next(self.scan)
    xr = self.sx * x
    yr = self.sy * y
    return xr, yr
|
Next point in iteration
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L274-L280
| null |
class scale(object):
"""Scale coordinates by given factor
"""
def __init__(self, scan, sx=1, sy=1):
"""
:param scan: Pixel scan generator
:type scan: function
:param sx: x-coordinate scale factor (default=1)
:type sx: float
:param sy: y-coordinate scale factor (default=1)
:type sy: float
"""
if sx <= 0: raise ValueError("X-scale must be positive")
if sy <= 0: raise ValueError("Y-scale must be positive")
self.scan = scan
self.sx = sx
self.sy = sy
def __iter__(self):
return self
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
skip.next
|
python
|
def next(self):
    """Return the next point whose 0-based index lies in [start, stop]
    and is aligned with the step size.

    :returns: Next (x, y) coordinate
    :rtype: tuple
    :raises StopIteration: Once the stop index has been passed
    """
    while True:
        x, y = next(self.scan)
        self.index += 1
        if (self.index < self.start): continue
        if (self.index > self.stop): raise StopIteration("skip stopping")
        if ((self.index-self.start) % self.step != 0): continue
        return x, y
|
Next point in iteration
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L309-L318
| null |
class skip(object):
    """Skip points at the given step size
    """
    def __init__(self, scan, start=0, stop=sys.maxint, step=1):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param start: Iteration starting 0-based index (default = 0)
        :type start: int
        :param stop: Iteration stopping 0-based index (default = sys.maxint)
        :type stop: int
        :param step: Iteration step size (default = 1)
        :type step: int
        :raises ValueError: If start/stop are negative, stop < start,
                            or step is not positive
        """
        # NOTE(review): sys.maxint exists only on Python 2; Python 3 would
        # need sys.maxsize here — confirm target interpreter.
        if start < 0: raise ValueError("Start must be non-negative")
        if stop < 0: raise ValueError("Stop must be non-negative")
        if stop < start: raise ValueError("Stop must be greater than start")
        if step <= 0: raise ValueError("Step must be positive")
        self.scan = scan
        self.start = start
        self.stop = stop
        self.step = step
        # Start at -1 so the first increment lands on index 0
        self.index = -1
    def __iter__(self):
        return self
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
snap.next
|
python
|
def next(self):
    """Return the next point rounded to the nearest integer grid point.

    :returns: Next (x, y) coordinate as ints
    :rtype: tuple
    """
    x, y = next(self.scan)
    xs = int(round(x))
    ys = int(round(y))
    return xs, ys
|
Next point in iteration
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L333-L339
| null |
class snap(object):
"""Snap x and y coordinates to a grid point
"""
def __init__(self, scan):
"""
:param scan: Pixel scan generator
:type scan: function
"""
self.scan = scan
def __iter__(self):
return self
|
dpmcmlxxvi/pixelscan
|
pixelscan/pixelscan.py
|
translation.next
|
python
|
def next(self):
    """Return the next point shifted by the (tx, ty) offset.

    :returns: Next translated (x, y) coordinate
    :rtype: tuple
    """
    x, y = next(self.scan)
    xr = x + self.tx
    yr = y + self.ty
    return xr, yr
|
Next point in iteration
|
train
|
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L380-L386
| null |
class translation(object):
    """Translate coordinates by a fixed offset
    """
    def __init__(self, scan, tx=0, ty=0):
        """
        :param scan: Pixel scan generator
        :type scan: function
        :param tx: x-coordinate translation offset (default = 0)
        :type tx: float
        :param ty: y-coordinate translation offset (default = 0)
        :type ty: float
        """
        self.scan = scan
        self.tx = tx
        self.ty = ty
    def __iter__(self):
        return self
|
TylerTemp/docpie
|
docpie/pie.py
|
Docpie.docpie
|
python
|
def docpie(self, argv=None):
    """Match argv against each usage pattern and return the result dict.

    If argv is None, sys.argv is used; a str argv is split() first (see
    _prepare_token). Options registered in self.extra are checked and
    handled before matching, so a handler (e.g. for --help) may exit
    without any usage being tried.

    :param argv: Command line to parse (default: sys.argv)
    :type argv: list, str or None
    :returns: Mapping of option/argument names to their matched values
    :rtype: dict
    """
    token = self._prepare_token(argv)
    # check first, raise after
    # so `-hwhatever` can trigger `-h` first
    self.check_flag_and_handler(token)
    if token.error is not None:
        # raise DocpieExit('%s\n\n%s' % (token.error, help_msg))
        self.exception_handler(token.error)
    try:
        result, dashed = self._match(token)
    except DocpieExit as e:
        self.exception_handler(e)
    # if error is not None:
    #     self.exception_handler(error)
    value = result.get_value(self.appeared_only, False)
    self.clear()
    self.update(value)
    if self.appeared_only:
        self._drop_non_appeared()
    logger.debug('get all matched value %s', self)
    # Fold defaults from the non-matched usages into the result
    rest = list(self.usages) # a copy
    rest.remove(result)
    self._add_rest_value(rest)
    logger.debug('merged rest values, now %s', self)
    self._add_option_value()
    self._dashes_value(dashed)
    return dict(self)
|
Match the argv against each usage pattern and return a dict.
If argv is None, it will use sys.argv instead.
If argv is a str, it will call argv.split() first.
This function will check the options in self.extra and handle them first,
which means it may not try to match any usage because of that checking.
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/pie.py#L128-L168
|
[
"def _drop_non_appeared(self):\n for key, _ in filter(lambda k_v: k_v[1] == -1, dict(self).items()):\n self.pop(key)\n",
"def _add_rest_value(self, rest):\n for each in rest:\n default_values = each.get_sys_default_value(\n self.appeared_only, False)\n logger.debug('get rest values %s -> %s', each, default_values)\n common_keys = set(self).intersection(default_values)\n\n for key in common_keys:\n default = default_values[key]\n valued = self[key]\n logger.debug('%s: default(%s), matched(%s)',\n key, default, valued)\n\n if ((default is not True and default is not False) and\n isinstance(default, int)):\n valued = int(valued)\n elif isinstance(default, list):\n if valued is None:\n valued = []\n elif isinstance(valued, list):\n pass\n else:\n valued = [valued]\n\n logger.debug('set %s as %s', key, valued)\n default_values[key] = valued\n\n self.update(default_values)\n",
"def _add_option_value(self):\n # add left option, add default value\n for options in self.options.values():\n for each in options:\n option = each[0]\n names = option.names\n default = option.default\n this_value = option.value\n\n logger.debug('%s/%s/%s', option, default, this_value)\n\n name_in_value = names.intersection(self)\n if name_in_value: # add default if necessary\n one_name = name_in_value.pop()\n logger.debug('in names, pop %s, self %s', one_name, self)\n value_in_usage = self[one_name]\n if not value_in_usage: # need default\n if default is None: # no default, use old matched one\n final_value = value_in_usage\n elif (each.repeat or\n (value_in_usage is not True and\n value_in_usage is not False and\n isinstance(value_in_usage, (int, list)))):\n final_value = default.split()\n else:\n final_value = default\n else:\n final_value = value_in_usage\n if option.ref is None and each.repeat:\n final_value = int(final_value or 0)\n # just add this key-value.\n # Note all option here never been matched\n elif self.appeared_only:\n continue\n else:\n ref = option.ref\n\n if default is not None:\n if (each.repeat or\n (this_value not in (True, False) and\n isinstance(this_value, (int, list)))):\n final_value = default.split()\n else:\n if ref is not None and max(ref.arg_range()) > 1:\n final_value = default.split()\n else:\n final_value = default\n else:\n if ref is not None:\n arg_range = ref.arg_range()\n # if min(arg_range) != 0:\n # # It requires at least a value\n # logger.debug('%s expects value', option)\n # raise DocpieExit(DocpieException.usage_str)\n if max(arg_range) == 1:\n final_value = None\n else:\n assert max(arg_range) > 1\n final_value = []\n # ref is None\n elif this_value is None:\n final_value = 0 if each.repeat else False\n else:\n final_value = \\\n int(this_value) if each.repeat else this_value\n\n logger.debug('set %s value %s', names, final_value)\n final = {}\n for name in names:\n final[name] = final_value\n self.update(final)\n",
"def _dashes_value(self, dashes):\n result = self['--'] if '--' in self else dashes\n if self.options_first:\n if result is True:\n result = False\n elif result is False:\n pass\n elif isinstance(result, int):\n result = max(0, result - 1)\n\n if self.auto2dashes:\n result = bool(result)\n\n self['--'] = result\n",
"def _prepare_token(self, argv):\n if argv is None:\n argv = sys.argv\n elif isinstance(argv, StrType):\n argv = argv.split()\n\n # the things in extra may not be announced\n all_opt_requried_max_args = dict.fromkeys(self.extra, 0)\n all_opt_requried_max_args.update(self.opt_names_required_max_args)\n token = Argv(argv[1:], self.auto2dashes or self.options_first,\n self.stdopt, self.attachopt, self.attachvalue,\n all_opt_requried_max_args)\n none_or_error = token.formal(self.options_first)\n logger.debug('formal token: %s; error: %s', token, none_or_error)\n if none_or_error is not None:\n return self.exception_handler(none_or_error)\n return token\n",
"def _match(self, token):\n for each in self.usages:\n logger.debug('matching usage %s', each)\n argv_clone = token.clone()\n if each.match(argv_clone, False):\n logger.debug('matched usage %s, checking rest argv %s',\n each, argv_clone)\n if (not argv_clone or\n (argv_clone.auto_dashes and\n list(argv_clone) == ['--'])):\n argv_clone.check_dash()\n logger.debug('matched usage %s / %s', each, argv_clone)\n return each, argv_clone.dashes\n\n logger.debug('matching %s left %s, checking failed',\n each, argv_clone)\n\n each.reset()\n logger.debug('failed matching usage %s / %s', each, argv_clone)\n",
"def check_flag_and_handler(self, token):\n need_arg = [name for name, expect in\n self.opt_names_required_max_args.items() if expect != 0]\n options = set()\n\n for ele in token:\n if self.auto2dashes and ele == '--':\n break\n if ele.startswith('-') and ele != '-':\n options.add(ele)\n\n for inputted in options:\n\n found = False\n for auto, handler in self.extra.items():\n if not callable(handler):\n continue\n\n if auto.startswith('--') and inputted.startswith('--'):\n logger.debug('check %s for %s', inputted, auto)\n if '=' in inputted:\n inputted = inputted.split('=', 1)[0]\n if inputted == auto:\n found = True\n break\n\n elif auto[1] != '-' and inputted[1] != '-':\n logger.debug('check %s for %s', inputted, auto)\n if self.stdopt:\n attachopt = self.attachopt\n break_upper = False\n for index, attached_name in enumerate(inputted[1:]):\n if not attachopt and index > 0:\n break\n\n logger.debug(\n 'check %s for %s', attached_name, auto\n )\n\n stacked_name = '-' + attached_name\n if stacked_name == auto:\n found = True\n logger.debug('find %s in %s', auto, inputted)\n\n if stacked_name in need_arg:\n break_upper = True\n break\n\n if found or break_upper: # break upper loop\n break\n else:\n found = (inputted == auto)\n\n if found:\n logger.debug('find %s, auto handle it', auto)\n handler(self, auto)\n",
"def exception_handler(self, error):\n logger.debug('handling %r', error)\n\n if self.option_sections:\n help_msg = ('%s\\n\\n%s' %\n (self.usage_text.rstrip(),\n '\\n'.join(self.option_sections.values())))\n else:\n help_msg = self.usage_text\n\n helpstyle = self.helpstyle\n if helpstyle == 'python':\n if self.option_sections: # option section will help dedent\n formated_help_msg = self.help_style_python(help_msg)\n else: # only need to dedent it\n formated_help_msg = self.help_style_dedent(help_msg)\n elif helpstyle == 'dedent':\n formated_help_msg = self.help_style_dedent(help_msg)\n else:\n formated_help_msg = help_msg\n\n args = list(error.args)\n message = args[0]\n if message is not None:\n formated_help_msg = '%s\\n\\n%s' % (message, formated_help_msg)\n\n # remove `\\n` because `raise` will auto add\n args[0] = formated_help_msg.rstrip()\n error = self.clone_exception(error, args)\n error.usage_text = self.usage_text\n error.option_sections = self.option_sections\n error.msg = message\n logger.debug('re-raise %r', error)\n raise error\n"
] |
class Docpie(dict):
# Docpie version
# it's not a good idea but it can avoid loop importing
_version = '0.4.2'
option_name = 'Options:'
usage_name = 'Usage:'
doc = None
case_sensitive = False
auto2dashes = True
name = None
help = True
helpstyle = 'python'
version = None
stdopt = True
attachopt = True
attachvalue = True
options_first = False
appeared_only = False
extra = {}
namedoptions = False
opt_names = []
opt_names_required_max_args = {}
def __init__(self, doc=None, help=True, version=None,
stdopt=True, attachopt=True, attachvalue=True,
helpstyle='python',
auto2dashes=True, name=None, case_sensitive=False,
optionsfirst=False, appearedonly=False, namedoptions=False,
extra=None):
super(Docpie, self).__init__()
if case_sensitive:
warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
'case insensitive')
if extra is None:
extra = {}
else:
extra = self._formal_extra(extra)
# set config first
self.set_config(
stdopt=stdopt, attachopt=attachopt, attachvalue=attachvalue,
auto2dashes=auto2dashes, name=name, case_sensitive=case_sensitive,
optionsfirst=optionsfirst, appearedonly=appearedonly,
namedoptions=namedoptions)
self.help = help
self.helpstyle = helpstyle
self.version = version
self.extra = extra
if doc is not None:
self.doc = doc
self._init()
def _init(self):
uparser = UsageParser(
self.usage_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
oparser = OptionParser(
self.option_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
uparser.parse_content(self.doc)
self.usage_text = usage_text = uparser.raw_content
# avoid usage contains "Options:" word
if usage_text is None:
assert self.usage_name.lower() not in self.doc.lower()
raise DocpieError(
'usage title %r not found in doc' % (self.usage_name,)
)
prefix, _, suffix = self.doc.partition(usage_text)
oparser.parse(prefix + suffix)
self.option_sections = oparser.raw_content
self.options = oparser.instances
uparser.parse(None, self.name, self.options)
self.usages = uparser.instances
self.opt_names_required_max_args = {}
for opt_ins in uparser.all_options:
if opt_ins.ref:
# max_arg = max(opt_ins.arg_range())
max_arg = max(opt_ins.ref.arg_range())
else:
max_arg = 0
for each_name in opt_ins.names:
self.opt_names_required_max_args[each_name] = max_arg
self.opt_names = []
for options in self.options.values():
for each_option in options:
self.opt_names.append(each_option[0].names)
self.set_config(help=self.help,
version=self.version,
extra=dict(self.extra))
# remove all other reference in this instance
def _drop_non_appeared(self):
for key, _ in filter(lambda k_v: k_v[1] == -1, dict(self).items()):
self.pop(key)
def _add_rest_value(self, rest):
for each in rest:
default_values = each.get_sys_default_value(
self.appeared_only, False)
logger.debug('get rest values %s -> %s', each, default_values)
common_keys = set(self).intersection(default_values)
for key in common_keys:
default = default_values[key]
valued = self[key]
logger.debug('%s: default(%s), matched(%s)',
key, default, valued)
if ((default is not True and default is not False) and
isinstance(default, int)):
valued = int(valued)
elif isinstance(default, list):
if valued is None:
valued = []
elif isinstance(valued, list):
pass
else:
valued = [valued]
logger.debug('set %s as %s', key, valued)
default_values[key] = valued
self.update(default_values)
def _add_option_value(self):
# add left option, add default value
for options in self.options.values():
for each in options:
option = each[0]
names = option.names
default = option.default
this_value = option.value
logger.debug('%s/%s/%s', option, default, this_value)
name_in_value = names.intersection(self)
if name_in_value: # add default if necessary
one_name = name_in_value.pop()
logger.debug('in names, pop %s, self %s', one_name, self)
value_in_usage = self[one_name]
if not value_in_usage: # need default
if default is None: # no default, use old matched one
final_value = value_in_usage
elif (each.repeat or
(value_in_usage is not True and
value_in_usage is not False and
isinstance(value_in_usage, (int, list)))):
final_value = default.split()
else:
final_value = default
else:
final_value = value_in_usage
if option.ref is None and each.repeat:
final_value = int(final_value or 0)
# just add this key-value.
# Note all option here never been matched
elif self.appeared_only:
continue
else:
ref = option.ref
if default is not None:
if (each.repeat or
(this_value not in (True, False) and
isinstance(this_value, (int, list)))):
final_value = default.split()
else:
if ref is not None and max(ref.arg_range()) > 1:
final_value = default.split()
else:
final_value = default
else:
if ref is not None:
arg_range = ref.arg_range()
# if min(arg_range) != 0:
# # It requires at least a value
# logger.debug('%s expects value', option)
# raise DocpieExit(DocpieException.usage_str)
if max(arg_range) == 1:
final_value = None
else:
assert max(arg_range) > 1
final_value = []
# ref is None
elif this_value is None:
final_value = 0 if each.repeat else False
else:
final_value = \
int(this_value) if each.repeat else this_value
logger.debug('set %s value %s', names, final_value)
final = {}
for name in names:
final[name] = final_value
self.update(final)
def _dashes_value(self, dashes):
result = self['--'] if '--' in self else dashes
if self.options_first:
if result is True:
result = False
elif result is False:
pass
elif isinstance(result, int):
result = max(0, result - 1)
if self.auto2dashes:
result = bool(result)
self['--'] = result
def _prepare_token(self, argv):
if argv is None:
argv = sys.argv
elif isinstance(argv, StrType):
argv = argv.split()
# the things in extra may not be announced
all_opt_requried_max_args = dict.fromkeys(self.extra, 0)
all_opt_requried_max_args.update(self.opt_names_required_max_args)
token = Argv(argv[1:], self.auto2dashes or self.options_first,
self.stdopt, self.attachopt, self.attachvalue,
all_opt_requried_max_args)
none_or_error = token.formal(self.options_first)
logger.debug('formal token: %s; error: %s', token, none_or_error)
if none_or_error is not None:
return self.exception_handler(none_or_error)
return token
def _match(self, token):
for each in self.usages:
logger.debug('matching usage %s', each)
argv_clone = token.clone()
if each.match(argv_clone, False):
logger.debug('matched usage %s, checking rest argv %s',
each, argv_clone)
if (not argv_clone or
(argv_clone.auto_dashes and
list(argv_clone) == ['--'])):
argv_clone.check_dash()
logger.debug('matched usage %s / %s', each, argv_clone)
return each, argv_clone.dashes
logger.debug('matching %s left %s, checking failed',
each, argv_clone)
each.reset()
logger.debug('failed matching usage %s / %s', each, argv_clone)
else:
logger.debug('none matched')
raise DocpieExit(None)
def check_flag_and_handler(self, token):
need_arg = [name for name, expect in
self.opt_names_required_max_args.items() if expect != 0]
options = set()
for ele in token:
if self.auto2dashes and ele == '--':
break
if ele.startswith('-') and ele != '-':
options.add(ele)
for inputted in options:
found = False
for auto, handler in self.extra.items():
if not callable(handler):
continue
if auto.startswith('--') and inputted.startswith('--'):
logger.debug('check %s for %s', inputted, auto)
if '=' in inputted:
inputted = inputted.split('=', 1)[0]
if inputted == auto:
found = True
break
elif auto[1] != '-' and inputted[1] != '-':
logger.debug('check %s for %s', inputted, auto)
if self.stdopt:
attachopt = self.attachopt
break_upper = False
for index, attached_name in enumerate(inputted[1:]):
if not attachopt and index > 0:
break
logger.debug(
'check %s for %s', attached_name, auto
)
stacked_name = '-' + attached_name
if stacked_name == auto:
found = True
logger.debug('find %s in %s', auto, inputted)
if stacked_name in need_arg:
break_upper = True
break
if found or break_upper: # break upper loop
break
else:
found = (inputted == auto)
if found:
logger.debug('find %s, auto handle it', auto)
handler(self, auto)
def exception_handler(self, error):
logger.debug('handling %r', error)
if self.option_sections:
help_msg = ('%s\n\n%s' %
(self.usage_text.rstrip(),
'\n'.join(self.option_sections.values())))
else:
help_msg = self.usage_text
helpstyle = self.helpstyle
if helpstyle == 'python':
if self.option_sections: # option section will help dedent
formated_help_msg = self.help_style_python(help_msg)
else: # only need to dedent it
formated_help_msg = self.help_style_dedent(help_msg)
elif helpstyle == 'dedent':
formated_help_msg = self.help_style_dedent(help_msg)
else:
formated_help_msg = help_msg
args = list(error.args)
message = args[0]
if message is not None:
formated_help_msg = '%s\n\n%s' % (message, formated_help_msg)
# remove `\n` because `raise` will auto add
args[0] = formated_help_msg.rstrip()
error = self.clone_exception(error, args)
error.usage_text = self.usage_text
error.option_sections = self.option_sections
error.msg = message
logger.debug('re-raise %r', error)
raise error
@staticmethod
def clone_exception(error, args):
"""
return a new cloned error
when do:
```
try:
do_sth()
except BaseException as e:
handle(e)
def handle(error):
# do sth with error
raise e # <- won't work!
This can generate a new cloned error of the same class
Parameters
----------
error: the caught error
args: the new args to init the cloned error
Returns
-------
new error of the same class
"""
new_error = error.__class__(*args)
new_error.__dict__ = error.__dict__
return new_error
@staticmethod
def help_handler(docpie, flag):
"""Default help(`--help`, `-h`) handler. print help string and exit.
when help = 'short_brief', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only.
when help = 'short_brief_notice', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only,
with a message.
"Use `--help` to see the full help messsage" in the end
otherwith(default), print the full `doc`
"""
help_type = docpie.help
helpstyle = docpie.helpstyle
if helpstyle == 'python':
doc = Docpie.help_style_python(docpie.doc)
elif helpstyle == 'dedent':
doc = Docpie.help_style_dedent(docpie.doc)
# elif help_style == 'raw':
# doc = Docpie.help_style_raw(docpie.doc)
else:
doc = docpie.doc
if help_type == 'short_brief':
if flag.startswith('--'):
print(doc)
else:
print(docpie.usage_text.rstrip())
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()))
elif help_type == 'short_brief_notice':
if flag.startswith('--'):
sys.stdout.write(doc)
else:
print(docpie.usage_text)
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()).rstrip())
print('')
print('Use `--help` to see the full help messsage.')
else:
sys.stdout.write(doc)
sys.exit()
@staticmethod
def help_style_python(docstring):
if not docstring:
return '\n'
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# indent = sys.maxint
indent = None
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
if indent is None:
indent = len(line) - len(stripped)
else:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
# if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed) + '\n'
@staticmethod
def help_style_dedent(docstring):
return textwrap.dedent(docstring)
@staticmethod
def version_handler(docpie, flag):
"""Default `-v` and `--version` handler. print the verison and exit."""
print(docpie.version)
sys.exit()
# Because it's divided from dict
# json.dump(docpie, default=docpie.convert_2_dict) won't work
# so convert to dict before JSONlizing
def to_dict(self): # cls, self):
"""Convert Docpie into a JSONlizable dict.
Use it in this way:
pie = Docpie(__doc__)
json.dumps(pie.convert_2_dict())
Note the `extra` info will be lost if you costomize that,
because a function is not JSONlizable.
You can use `set_config(extra={...})` to set it back.
"""
config = {
'stdopt': self.stdopt,
'attachopt': self.attachopt,
'attachvalue': self.attachvalue,
'auto2dashes': self.auto2dashes,
'case_sensitive': self.case_sensitive,
'namedoptions': self.namedoptions,
'appearedonly': self.appeared_only,
'optionsfirst': self.options_first,
'option_name': self.option_name,
'usage_name': self.usage_name,
'name': self.name,
'help': self.help,
'version': self.version
}
text = {
'doc': self.doc,
'usage_text': self.usage_text,
'option_sections': self.option_sections,
}
# option = [convert_2_dict(x) for x in self.options]
option = {}
for title, options in self.options.items():
option[title] = [convert_2_dict(x) for x in options]
usage = [convert_2_dict(x) for x in self.usages]
return {
'__version__': self._version,
'__class__': 'Docpie',
'__config__': config,
'__text__': text,
'option': option,
'usage': usage,
'option_names': [list(x) for x in self.opt_names],
'opt_names_required_max_args': self.opt_names_required_max_args
}
convert_2_dict = convert_to_dict = to_dict
@classmethod
def from_dict(cls, dic):
"""Convert dict generated by `convert_2_dict` into Docpie instance
You can do this:
pie = Docpie(__doc__)
clone_pie = json.loads(pie.convert_2_docpie(
json.dumps(pie.convert_2_dict())
))
Note if you changed `extra`, it will be lost.
You can use `set_config(extra={...})` to set it back.
"""
if '__version__' not in dic:
raise ValueError('Not support old docpie data')
data_version = int(dic['__version__'].replace('.', ''))
this_version = int(cls._version.replace('.', ''))
logger.debug('this: %s, old: %s', this_version, data_version)
if data_version < this_version:
raise ValueError('Not support old docpie data')
assert dic['__class__'] == 'Docpie'
config = dic['__config__']
help = config.pop('help')
version = config.pop('version')
option_name = config.pop('option_name')
usage_name = config.pop('usage_name')
self = cls(None, **config)
self.option_name = option_name
self.usage_name = usage_name
text = dic['__text__']
self.doc = text['doc']
self.usage_text = text['usage_text']
self.option_sections = text['option_sections']
self.opt_names = [set(x) for x in dic['option_names']]
self.opt_names_required_max_args = dic['opt_names_required_max_args']
self.set_config(help=help, version=version)
self.options = o = {}
for title, options in dic['option'].items():
opt_ins = [convert_2_object(x, {}, self.namedoptions)
for x in options]
o[title] = opt_ins
self.usages = [convert_2_object(x, self.options, self.namedoptions)
for x in dic['usage']]
return self
convert_2_docpie = convert_to_docpie = from_dict
def set_config(self, **config):
"""Shadow all the current config."""
reinit = False
if 'stdopt' in config:
stdopt = config.pop('stdopt')
reinit = (stdopt != self.stdopt)
self.stdopt = stdopt
if 'attachopt' in config:
attachopt = config.pop('attachopt')
reinit = reinit or (attachopt != self.attachopt)
self.attachopt = attachopt
if 'attachvalue' in config:
attachvalue = config.pop('attachvalue')
reinit = reinit or (attachvalue != self.attachvalue)
self.attachvalue = attachvalue
if 'auto2dashes' in config:
self.auto2dashes = config.pop('auto2dashes')
if 'name' in config:
name = config.pop('name')
reinit = reinit or (name != self.name)
self.name = name
if 'help' in config:
self.help = config.pop('help')
self._set_or_remove_extra_handler(
self.help, ('--help', '-h'), self.help_handler)
if 'version' in config:
self.version = config.pop('version')
self._set_or_remove_extra_handler(
self.version is not None,
('--version', '-v'),
self.version_handler)
if 'case_sensitive' in config:
case_sensitive = config.pop('case_sensitive')
reinit = reinit or (case_sensitive != self.case_sensitive)
self.case_sensitive = case_sensitive
if 'optionsfirst' in config:
self.options_first = config.pop('optionsfirst')
if 'appearedonly' in config:
self.appeared_only = config.pop('appearedonly')
if 'namedoptions' in config:
namedoptions = config.pop('namedoptions')
reinit = reinit or (namedoptions != self.namedoptions)
self.namedoptions = namedoptions
if 'extra' in config:
self.extra.update(self._formal_extra(config.pop('extra')))
if config: # should be empty
raise ValueError(
'`%s` %s not accepted key argument%s' % (
'`, `'.join(config),
'is' if len(config) == 1 else 'are',
'' if len(config) == 1 else 's'
))
if self.doc is not None and reinit:
logger.warning(
'You changed the config that requires re-initialized'
' `Docpie` object. Create a new one instead'
)
self._init()
def _formal_extra(self, extra):
result = {}
for keys, value in extra.items():
if isinstance(keys, StrType):
keys = [keys]
result.update((k, value) for k in keys)
return result
def _set_or_remove_extra_handler(self, set_handler, find_order, handler):
for flag in find_order:
alias = self.find_flag_alias(flag)
if alias is not None:
alias.add(flag)
for each in alias:
if set_handler:
logger.debug('set %s hanlder %s', each, handler)
self.extra[each] = handler
else:
logger.debug('remove %s hanlder', each)
_hdlr = self.extra.pop(each, None)
logger.debug('%s handler %s removed', each, _hdlr)
break
else:
for flag in find_order:
if set_handler:
logger.debug('set %s hanlder', flag)
self.extra[flag] = handler
else:
logger.debug('remove %s hanlder', flag)
self.extra.pop(flag, None)
def find_flag_alias(self, flag):
"""Return alias set of a flag; return None if flag is not defined in
"Options".
"""
for each in self.opt_names:
if flag in each:
result = set(each) # a copy
result.remove(flag)
return result
return None
def set_auto_handler(self, flag, handler):
"""Set pre-auto-handler for a flag.
the handler must accept two argument: first the `pie` which
referent to the current `Docpie` instance, second, the `flag`
which is the flag found in `argv`.
Different from `extra` argument, this will set the alias
option you defined in `Option` section with the same
behavior.
"""
assert flag.startswith('-') and flag not in ('-', '--')
alias = self.find_flag_alias(flag) or []
self.extra[flag] = handler
for each in alias:
self.extra[each] = handler
def preview(self, stream=sys.stdout):
"""A quick preview of docpie. Print all the parsed object"""
write = stream.write
write(('[Quick preview of Docpie %s]' % self._version).center(80, '='))
write('\n')
write(' sections '.center(80, '-'))
write('\n')
write(self.usage_text)
write('\n')
option_sections = self.option_sections
if option_sections:
write('\n')
write('\n'.join(option_sections.values()))
write('\n')
write(' str '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %s\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %s\n' % each)
write('\n')
write(' repr '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %r\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %r\n' % each)
write('\n')
write(' auto handlers '.center(80, '-'))
write('\n')
for key, value in self.extra.items():
write('%s %s\n' % (key, value))
def __str__(self):
return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
|
TylerTemp/docpie
|
docpie/pie.py
|
Docpie.clone_exception
|
python
|
def clone_exception(error, args):
new_error = error.__class__(*args)
new_error.__dict__ = error.__dict__
return new_error
|
return a new cloned error
when do:
```
try:
do_sth()
except BaseException as e:
handle(e)
def handle(error):
# do sth with error
raise e # <- won't work!
This can generate a new cloned error of the same class
Parameters
----------
error: the caught error
args: the new args to init the cloned error
Returns
-------
new error of the same class
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/pie.py#L425-L454
| null |
class Docpie(dict):
# Docpie version
# it's not a good idea but it can avoid loop importing
_version = '0.4.2'
option_name = 'Options:'
usage_name = 'Usage:'
doc = None
case_sensitive = False
auto2dashes = True
name = None
help = True
helpstyle = 'python'
version = None
stdopt = True
attachopt = True
attachvalue = True
options_first = False
appeared_only = False
extra = {}
namedoptions = False
opt_names = []
opt_names_required_max_args = {}
def __init__(self, doc=None, help=True, version=None,
stdopt=True, attachopt=True, attachvalue=True,
helpstyle='python',
auto2dashes=True, name=None, case_sensitive=False,
optionsfirst=False, appearedonly=False, namedoptions=False,
extra=None):
super(Docpie, self).__init__()
if case_sensitive:
warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
'case insensitive')
if extra is None:
extra = {}
else:
extra = self._formal_extra(extra)
# set config first
self.set_config(
stdopt=stdopt, attachopt=attachopt, attachvalue=attachvalue,
auto2dashes=auto2dashes, name=name, case_sensitive=case_sensitive,
optionsfirst=optionsfirst, appearedonly=appearedonly,
namedoptions=namedoptions)
self.help = help
self.helpstyle = helpstyle
self.version = version
self.extra = extra
if doc is not None:
self.doc = doc
self._init()
def _init(self):
uparser = UsageParser(
self.usage_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
oparser = OptionParser(
self.option_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
uparser.parse_content(self.doc)
self.usage_text = usage_text = uparser.raw_content
# avoid usage contains "Options:" word
if usage_text is None:
assert self.usage_name.lower() not in self.doc.lower()
raise DocpieError(
'usage title %r not found in doc' % (self.usage_name,)
)
prefix, _, suffix = self.doc.partition(usage_text)
oparser.parse(prefix + suffix)
self.option_sections = oparser.raw_content
self.options = oparser.instances
uparser.parse(None, self.name, self.options)
self.usages = uparser.instances
self.opt_names_required_max_args = {}
for opt_ins in uparser.all_options:
if opt_ins.ref:
# max_arg = max(opt_ins.arg_range())
max_arg = max(opt_ins.ref.arg_range())
else:
max_arg = 0
for each_name in opt_ins.names:
self.opt_names_required_max_args[each_name] = max_arg
self.opt_names = []
for options in self.options.values():
for each_option in options:
self.opt_names.append(each_option[0].names)
self.set_config(help=self.help,
version=self.version,
extra=dict(self.extra))
def docpie(self, argv=None):
"""match the argv for each usages, return dict.
if argv is None, it will use sys.argv instead.
if argv is str, it will call argv.split() first.
this function will check the options in self.extra and handle it first.
Which means it may not try to match any usages because of the checking.
"""
token = self._prepare_token(argv)
# check first, raise after
# so `-hwhatever` can trigger `-h` first
self.check_flag_and_handler(token)
if token.error is not None:
# raise DocpieExit('%s\n\n%s' % (token.error, help_msg))
self.exception_handler(token.error)
try:
result, dashed = self._match(token)
except DocpieExit as e:
self.exception_handler(e)
# if error is not None:
# self.exception_handler(error)
value = result.get_value(self.appeared_only, False)
self.clear()
self.update(value)
if self.appeared_only:
self._drop_non_appeared()
logger.debug('get all matched value %s', self)
rest = list(self.usages) # a copy
rest.remove(result)
self._add_rest_value(rest)
logger.debug('merged rest values, now %s', self)
self._add_option_value()
self._dashes_value(dashed)
return dict(self) # remove all other reference in this instance
def _drop_non_appeared(self):
for key, _ in filter(lambda k_v: k_v[1] == -1, dict(self).items()):
self.pop(key)
def _add_rest_value(self, rest):
for each in rest:
default_values = each.get_sys_default_value(
self.appeared_only, False)
logger.debug('get rest values %s -> %s', each, default_values)
common_keys = set(self).intersection(default_values)
for key in common_keys:
default = default_values[key]
valued = self[key]
logger.debug('%s: default(%s), matched(%s)',
key, default, valued)
if ((default is not True and default is not False) and
isinstance(default, int)):
valued = int(valued)
elif isinstance(default, list):
if valued is None:
valued = []
elif isinstance(valued, list):
pass
else:
valued = [valued]
logger.debug('set %s as %s', key, valued)
default_values[key] = valued
self.update(default_values)
def _add_option_value(self):
# add left option, add default value
for options in self.options.values():
for each in options:
option = each[0]
names = option.names
default = option.default
this_value = option.value
logger.debug('%s/%s/%s', option, default, this_value)
name_in_value = names.intersection(self)
if name_in_value: # add default if necessary
one_name = name_in_value.pop()
logger.debug('in names, pop %s, self %s', one_name, self)
value_in_usage = self[one_name]
if not value_in_usage: # need default
if default is None: # no default, use old matched one
final_value = value_in_usage
elif (each.repeat or
(value_in_usage is not True and
value_in_usage is not False and
isinstance(value_in_usage, (int, list)))):
final_value = default.split()
else:
final_value = default
else:
final_value = value_in_usage
if option.ref is None and each.repeat:
final_value = int(final_value or 0)
# just add this key-value.
# Note all option here never been matched
elif self.appeared_only:
continue
else:
ref = option.ref
if default is not None:
if (each.repeat or
(this_value not in (True, False) and
isinstance(this_value, (int, list)))):
final_value = default.split()
else:
if ref is not None and max(ref.arg_range()) > 1:
final_value = default.split()
else:
final_value = default
else:
if ref is not None:
arg_range = ref.arg_range()
# if min(arg_range) != 0:
# # It requires at least a value
# logger.debug('%s expects value', option)
# raise DocpieExit(DocpieException.usage_str)
if max(arg_range) == 1:
final_value = None
else:
assert max(arg_range) > 1
final_value = []
# ref is None
elif this_value is None:
final_value = 0 if each.repeat else False
else:
final_value = \
int(this_value) if each.repeat else this_value
logger.debug('set %s value %s', names, final_value)
final = {}
for name in names:
final[name] = final_value
self.update(final)
def _dashes_value(self, dashes):
result = self['--'] if '--' in self else dashes
if self.options_first:
if result is True:
result = False
elif result is False:
pass
elif isinstance(result, int):
result = max(0, result - 1)
if self.auto2dashes:
result = bool(result)
self['--'] = result
def _prepare_token(self, argv):
if argv is None:
argv = sys.argv
elif isinstance(argv, StrType):
argv = argv.split()
# the things in extra may not be announced
all_opt_requried_max_args = dict.fromkeys(self.extra, 0)
all_opt_requried_max_args.update(self.opt_names_required_max_args)
token = Argv(argv[1:], self.auto2dashes or self.options_first,
self.stdopt, self.attachopt, self.attachvalue,
all_opt_requried_max_args)
none_or_error = token.formal(self.options_first)
logger.debug('formal token: %s; error: %s', token, none_or_error)
if none_or_error is not None:
return self.exception_handler(none_or_error)
return token
def _match(self, token):
for each in self.usages:
logger.debug('matching usage %s', each)
argv_clone = token.clone()
if each.match(argv_clone, False):
logger.debug('matched usage %s, checking rest argv %s',
each, argv_clone)
if (not argv_clone or
(argv_clone.auto_dashes and
list(argv_clone) == ['--'])):
argv_clone.check_dash()
logger.debug('matched usage %s / %s', each, argv_clone)
return each, argv_clone.dashes
logger.debug('matching %s left %s, checking failed',
each, argv_clone)
each.reset()
logger.debug('failed matching usage %s / %s', each, argv_clone)
else:
logger.debug('none matched')
raise DocpieExit(None)
def check_flag_and_handler(self, token):
need_arg = [name for name, expect in
self.opt_names_required_max_args.items() if expect != 0]
options = set()
for ele in token:
if self.auto2dashes and ele == '--':
break
if ele.startswith('-') and ele != '-':
options.add(ele)
for inputted in options:
found = False
for auto, handler in self.extra.items():
if not callable(handler):
continue
if auto.startswith('--') and inputted.startswith('--'):
logger.debug('check %s for %s', inputted, auto)
if '=' in inputted:
inputted = inputted.split('=', 1)[0]
if inputted == auto:
found = True
break
elif auto[1] != '-' and inputted[1] != '-':
logger.debug('check %s for %s', inputted, auto)
if self.stdopt:
attachopt = self.attachopt
break_upper = False
for index, attached_name in enumerate(inputted[1:]):
if not attachopt and index > 0:
break
logger.debug(
'check %s for %s', attached_name, auto
)
stacked_name = '-' + attached_name
if stacked_name == auto:
found = True
logger.debug('find %s in %s', auto, inputted)
if stacked_name in need_arg:
break_upper = True
break
if found or break_upper: # break upper loop
break
else:
found = (inputted == auto)
if found:
logger.debug('find %s, auto handle it', auto)
handler(self, auto)
def exception_handler(self, error):
logger.debug('handling %r', error)
if self.option_sections:
help_msg = ('%s\n\n%s' %
(self.usage_text.rstrip(),
'\n'.join(self.option_sections.values())))
else:
help_msg = self.usage_text
helpstyle = self.helpstyle
if helpstyle == 'python':
if self.option_sections: # option section will help dedent
formated_help_msg = self.help_style_python(help_msg)
else: # only need to dedent it
formated_help_msg = self.help_style_dedent(help_msg)
elif helpstyle == 'dedent':
formated_help_msg = self.help_style_dedent(help_msg)
else:
formated_help_msg = help_msg
args = list(error.args)
message = args[0]
if message is not None:
formated_help_msg = '%s\n\n%s' % (message, formated_help_msg)
# remove `\n` because `raise` will auto add
args[0] = formated_help_msg.rstrip()
error = self.clone_exception(error, args)
error.usage_text = self.usage_text
error.option_sections = self.option_sections
error.msg = message
logger.debug('re-raise %r', error)
raise error
@staticmethod
@staticmethod
def help_handler(docpie, flag):
"""Default help(`--help`, `-h`) handler. print help string and exit.
when help = 'short_brief', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only.
when help = 'short_brief_notice', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only,
with a message.
"Use `--help` to see the full help messsage" in the end
otherwith(default), print the full `doc`
"""
help_type = docpie.help
helpstyle = docpie.helpstyle
if helpstyle == 'python':
doc = Docpie.help_style_python(docpie.doc)
elif helpstyle == 'dedent':
doc = Docpie.help_style_dedent(docpie.doc)
# elif help_style == 'raw':
# doc = Docpie.help_style_raw(docpie.doc)
else:
doc = docpie.doc
if help_type == 'short_brief':
if flag.startswith('--'):
print(doc)
else:
print(docpie.usage_text.rstrip())
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()))
elif help_type == 'short_brief_notice':
if flag.startswith('--'):
sys.stdout.write(doc)
else:
print(docpie.usage_text)
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()).rstrip())
print('')
print('Use `--help` to see the full help messsage.')
else:
sys.stdout.write(doc)
sys.exit()
@staticmethod
def help_style_python(docstring):
if not docstring:
return '\n'
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# indent = sys.maxint
indent = None
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
if indent is None:
indent = len(line) - len(stripped)
else:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
# if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed) + '\n'
@staticmethod
def help_style_dedent(docstring):
return textwrap.dedent(docstring)
@staticmethod
def version_handler(docpie, flag):
"""Default `-v` and `--version` handler. print the verison and exit."""
print(docpie.version)
sys.exit()
# Because it's divided from dict
# json.dump(docpie, default=docpie.convert_2_dict) won't work
# so convert to dict before JSONlizing
def to_dict(self): # cls, self):
"""Convert Docpie into a JSONlizable dict.
Use it in this way:
pie = Docpie(__doc__)
json.dumps(pie.convert_2_dict())
Note the `extra` info will be lost if you costomize that,
because a function is not JSONlizable.
You can use `set_config(extra={...})` to set it back.
"""
config = {
'stdopt': self.stdopt,
'attachopt': self.attachopt,
'attachvalue': self.attachvalue,
'auto2dashes': self.auto2dashes,
'case_sensitive': self.case_sensitive,
'namedoptions': self.namedoptions,
'appearedonly': self.appeared_only,
'optionsfirst': self.options_first,
'option_name': self.option_name,
'usage_name': self.usage_name,
'name': self.name,
'help': self.help,
'version': self.version
}
text = {
'doc': self.doc,
'usage_text': self.usage_text,
'option_sections': self.option_sections,
}
# option = [convert_2_dict(x) for x in self.options]
option = {}
for title, options in self.options.items():
option[title] = [convert_2_dict(x) for x in options]
usage = [convert_2_dict(x) for x in self.usages]
return {
'__version__': self._version,
'__class__': 'Docpie',
'__config__': config,
'__text__': text,
'option': option,
'usage': usage,
'option_names': [list(x) for x in self.opt_names],
'opt_names_required_max_args': self.opt_names_required_max_args
}
convert_2_dict = convert_to_dict = to_dict
@classmethod
def from_dict(cls, dic):
"""Convert dict generated by `convert_2_dict` into Docpie instance
You can do this:
pie = Docpie(__doc__)
clone_pie = json.loads(pie.convert_2_docpie(
json.dumps(pie.convert_2_dict())
))
Note if you changed `extra`, it will be lost.
You can use `set_config(extra={...})` to set it back.
"""
if '__version__' not in dic:
raise ValueError('Not support old docpie data')
data_version = int(dic['__version__'].replace('.', ''))
this_version = int(cls._version.replace('.', ''))
logger.debug('this: %s, old: %s', this_version, data_version)
if data_version < this_version:
raise ValueError('Not support old docpie data')
assert dic['__class__'] == 'Docpie'
config = dic['__config__']
help = config.pop('help')
version = config.pop('version')
option_name = config.pop('option_name')
usage_name = config.pop('usage_name')
self = cls(None, **config)
self.option_name = option_name
self.usage_name = usage_name
text = dic['__text__']
self.doc = text['doc']
self.usage_text = text['usage_text']
self.option_sections = text['option_sections']
self.opt_names = [set(x) for x in dic['option_names']]
self.opt_names_required_max_args = dic['opt_names_required_max_args']
self.set_config(help=help, version=version)
self.options = o = {}
for title, options in dic['option'].items():
opt_ins = [convert_2_object(x, {}, self.namedoptions)
for x in options]
o[title] = opt_ins
self.usages = [convert_2_object(x, self.options, self.namedoptions)
for x in dic['usage']]
return self
convert_2_docpie = convert_to_docpie = from_dict
def set_config(self, **config):
"""Shadow all the current config."""
reinit = False
if 'stdopt' in config:
stdopt = config.pop('stdopt')
reinit = (stdopt != self.stdopt)
self.stdopt = stdopt
if 'attachopt' in config:
attachopt = config.pop('attachopt')
reinit = reinit or (attachopt != self.attachopt)
self.attachopt = attachopt
if 'attachvalue' in config:
attachvalue = config.pop('attachvalue')
reinit = reinit or (attachvalue != self.attachvalue)
self.attachvalue = attachvalue
if 'auto2dashes' in config:
self.auto2dashes = config.pop('auto2dashes')
if 'name' in config:
name = config.pop('name')
reinit = reinit or (name != self.name)
self.name = name
if 'help' in config:
self.help = config.pop('help')
self._set_or_remove_extra_handler(
self.help, ('--help', '-h'), self.help_handler)
if 'version' in config:
self.version = config.pop('version')
self._set_or_remove_extra_handler(
self.version is not None,
('--version', '-v'),
self.version_handler)
if 'case_sensitive' in config:
case_sensitive = config.pop('case_sensitive')
reinit = reinit or (case_sensitive != self.case_sensitive)
self.case_sensitive = case_sensitive
if 'optionsfirst' in config:
self.options_first = config.pop('optionsfirst')
if 'appearedonly' in config:
self.appeared_only = config.pop('appearedonly')
if 'namedoptions' in config:
namedoptions = config.pop('namedoptions')
reinit = reinit or (namedoptions != self.namedoptions)
self.namedoptions = namedoptions
if 'extra' in config:
self.extra.update(self._formal_extra(config.pop('extra')))
if config: # should be empty
raise ValueError(
'`%s` %s not accepted key argument%s' % (
'`, `'.join(config),
'is' if len(config) == 1 else 'are',
'' if len(config) == 1 else 's'
))
if self.doc is not None and reinit:
logger.warning(
'You changed the config that requires re-initialized'
' `Docpie` object. Create a new one instead'
)
self._init()
def _formal_extra(self, extra):
result = {}
for keys, value in extra.items():
if isinstance(keys, StrType):
keys = [keys]
result.update((k, value) for k in keys)
return result
def _set_or_remove_extra_handler(self, set_handler, find_order, handler):
for flag in find_order:
alias = self.find_flag_alias(flag)
if alias is not None:
alias.add(flag)
for each in alias:
if set_handler:
logger.debug('set %s hanlder %s', each, handler)
self.extra[each] = handler
else:
logger.debug('remove %s hanlder', each)
_hdlr = self.extra.pop(each, None)
logger.debug('%s handler %s removed', each, _hdlr)
break
else:
for flag in find_order:
if set_handler:
logger.debug('set %s hanlder', flag)
self.extra[flag] = handler
else:
logger.debug('remove %s hanlder', flag)
self.extra.pop(flag, None)
def find_flag_alias(self, flag):
"""Return alias set of a flag; return None if flag is not defined in
"Options".
"""
for each in self.opt_names:
if flag in each:
result = set(each) # a copy
result.remove(flag)
return result
return None
def set_auto_handler(self, flag, handler):
"""Set pre-auto-handler for a flag.
the handler must accept two argument: first the `pie` which
referent to the current `Docpie` instance, second, the `flag`
which is the flag found in `argv`.
Different from `extra` argument, this will set the alias
option you defined in `Option` section with the same
behavior.
"""
assert flag.startswith('-') and flag not in ('-', '--')
alias = self.find_flag_alias(flag) or []
self.extra[flag] = handler
for each in alias:
self.extra[each] = handler
def preview(self, stream=sys.stdout):
"""A quick preview of docpie. Print all the parsed object"""
write = stream.write
write(('[Quick preview of Docpie %s]' % self._version).center(80, '='))
write('\n')
write(' sections '.center(80, '-'))
write('\n')
write(self.usage_text)
write('\n')
option_sections = self.option_sections
if option_sections:
write('\n')
write('\n'.join(option_sections.values()))
write('\n')
write(' str '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %s\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %s\n' % each)
write('\n')
write(' repr '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %r\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %r\n' % each)
write('\n')
write(' auto handlers '.center(80, '-'))
write('\n')
for key, value in self.extra.items():
write('%s %s\n' % (key, value))
def __str__(self):
return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
|
TylerTemp/docpie
|
docpie/pie.py
|
Docpie.help_handler
|
python
|
def help_handler(docpie, flag):
help_type = docpie.help
helpstyle = docpie.helpstyle
if helpstyle == 'python':
doc = Docpie.help_style_python(docpie.doc)
elif helpstyle == 'dedent':
doc = Docpie.help_style_dedent(docpie.doc)
# elif help_style == 'raw':
# doc = Docpie.help_style_raw(docpie.doc)
else:
doc = docpie.doc
if help_type == 'short_brief':
if flag.startswith('--'):
print(doc)
else:
print(docpie.usage_text.rstrip())
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()))
elif help_type == 'short_brief_notice':
if flag.startswith('--'):
sys.stdout.write(doc)
else:
print(docpie.usage_text)
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()).rstrip())
print('')
print('Use `--help` to see the full help messsage.')
else:
sys.stdout.write(doc)
sys.exit()
|
Default help(`--help`, `-h`) handler. print help string and exit.
when help = 'short_brief', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only.
when help = 'short_brief_notice', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only,
with a message.
"Use `--help` to see the full help messsage" in the end
otherwith(default), print the full `doc`
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/pie.py#L457-L502
| null |
class Docpie(dict):
# Docpie version
# it's not a good idea but it can avoid loop importing
_version = '0.4.2'
option_name = 'Options:'
usage_name = 'Usage:'
doc = None
case_sensitive = False
auto2dashes = True
name = None
help = True
helpstyle = 'python'
version = None
stdopt = True
attachopt = True
attachvalue = True
options_first = False
appeared_only = False
extra = {}
namedoptions = False
opt_names = []
opt_names_required_max_args = {}
def __init__(self, doc=None, help=True, version=None,
stdopt=True, attachopt=True, attachvalue=True,
helpstyle='python',
auto2dashes=True, name=None, case_sensitive=False,
optionsfirst=False, appearedonly=False, namedoptions=False,
extra=None):
super(Docpie, self).__init__()
if case_sensitive:
warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
'case insensitive')
if extra is None:
extra = {}
else:
extra = self._formal_extra(extra)
# set config first
self.set_config(
stdopt=stdopt, attachopt=attachopt, attachvalue=attachvalue,
auto2dashes=auto2dashes, name=name, case_sensitive=case_sensitive,
optionsfirst=optionsfirst, appearedonly=appearedonly,
namedoptions=namedoptions)
self.help = help
self.helpstyle = helpstyle
self.version = version
self.extra = extra
if doc is not None:
self.doc = doc
self._init()
def _init(self):
uparser = UsageParser(
self.usage_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
oparser = OptionParser(
self.option_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
uparser.parse_content(self.doc)
self.usage_text = usage_text = uparser.raw_content
# avoid usage contains "Options:" word
if usage_text is None:
assert self.usage_name.lower() not in self.doc.lower()
raise DocpieError(
'usage title %r not found in doc' % (self.usage_name,)
)
prefix, _, suffix = self.doc.partition(usage_text)
oparser.parse(prefix + suffix)
self.option_sections = oparser.raw_content
self.options = oparser.instances
uparser.parse(None, self.name, self.options)
self.usages = uparser.instances
self.opt_names_required_max_args = {}
for opt_ins in uparser.all_options:
if opt_ins.ref:
# max_arg = max(opt_ins.arg_range())
max_arg = max(opt_ins.ref.arg_range())
else:
max_arg = 0
for each_name in opt_ins.names:
self.opt_names_required_max_args[each_name] = max_arg
self.opt_names = []
for options in self.options.values():
for each_option in options:
self.opt_names.append(each_option[0].names)
self.set_config(help=self.help,
version=self.version,
extra=dict(self.extra))
def docpie(self, argv=None):
"""match the argv for each usages, return dict.
if argv is None, it will use sys.argv instead.
if argv is str, it will call argv.split() first.
this function will check the options in self.extra and handle it first.
Which means it may not try to match any usages because of the checking.
"""
token = self._prepare_token(argv)
# check first, raise after
# so `-hwhatever` can trigger `-h` first
self.check_flag_and_handler(token)
if token.error is not None:
# raise DocpieExit('%s\n\n%s' % (token.error, help_msg))
self.exception_handler(token.error)
try:
result, dashed = self._match(token)
except DocpieExit as e:
self.exception_handler(e)
# if error is not None:
# self.exception_handler(error)
value = result.get_value(self.appeared_only, False)
self.clear()
self.update(value)
if self.appeared_only:
self._drop_non_appeared()
logger.debug('get all matched value %s', self)
rest = list(self.usages) # a copy
rest.remove(result)
self._add_rest_value(rest)
logger.debug('merged rest values, now %s', self)
self._add_option_value()
self._dashes_value(dashed)
return dict(self) # remove all other reference in this instance
def _drop_non_appeared(self):
    """Remove every entry whose value is the "did not appear" sentinel -1."""
    absent_keys = [key for key, value in dict(self).items() if value == -1]
    for key in absent_keys:
        self.pop(key)
def _add_rest_value(self, rest):
    """Merge default values contributed by the usages that did NOT match.

    *rest* is the list of parsed usage instances minus the matched one.
    For keys already matched, only the TYPE of the stored value is
    adapted to the rest-usage default (int counter / list), keeping the
    matched value itself.
    """
    for each in rest:
        default_values = each.get_sys_default_value(
            self.appeared_only, False)
        logger.debug('get rest values %s -> %s', each, default_values)
        common_keys = set(self).intersection(default_values)
        for key in common_keys:
            default = default_values[key]
            valued = self[key]
            logger.debug('%s: default(%s), matched(%s)',
                         key, default, valued)
            # bool is a subclass of int, so True/False are excluded
            # explicitly before treating the default as a counter
            if ((default is not True and default is not False) and
                    isinstance(default, int)):
                valued = int(valued)
            elif isinstance(default, list):
                if valued is None:
                    valued = []
                elif isinstance(valued, list):
                    pass
                else:
                    valued = [valued]
            logger.debug('set %s as %s', key, valued)
            # keep the already-matched value, only its type was adapted
            default_values[key] = valued
        self.update(default_values)
def _add_option_value(self):
    """Fill in values for every option declared in the "Options" sections.

    Matched options may still need their announced default applied
    (e.g. a flag given without its value); unmatched options get their
    default, or a type-appropriate empty value (None / [] / 0 / False)
    derived from whether they repeat and how many arguments they take.
    """
    # add left option, add default value
    for options in self.options.values():
        for each in options:
            option = each[0]
            names = option.names
            default = option.default
            this_value = option.value
            logger.debug('%s/%s/%s', option, default, this_value)
            name_in_value = names.intersection(self)
            if name_in_value:  # add default if necessary
                one_name = name_in_value.pop()
                logger.debug('in names, pop %s, self %s', one_name, self)
                value_in_usage = self[one_name]
                if not value_in_usage:  # need default
                    if default is None:  # no default, use old matched one
                        final_value = value_in_usage
                    elif (each.repeat or
                            (value_in_usage is not True and
                             value_in_usage is not False and
                             isinstance(value_in_usage, (int, list)))):
                        # repeating / multi-value option: split the
                        # announced default into a list
                        final_value = default.split()
                    else:
                        final_value = default
                else:
                    final_value = value_in_usage
                # bare repeating flag: its value is an occurrence count
                if option.ref is None and each.repeat:
                    final_value = int(final_value or 0)
            # just add this key-value.
            # Note all option here never been matched
            elif self.appeared_only:
                continue
            else:
                ref = option.ref
                if default is not None:
                    if (each.repeat or
                            (this_value not in (True, False) and
                             isinstance(this_value, (int, list)))):
                        final_value = default.split()
                    else:
                        # NOTE(review): ref.arg_range() presumably gives
                        # the min/max argument counts — confirm upstream
                        if ref is not None and max(ref.arg_range()) > 1:
                            final_value = default.split()
                        else:
                            final_value = default
                else:
                    if ref is not None:
                        arg_range = ref.arg_range()
                        # if min(arg_range) != 0:
                        #     # It requires at least a value
                        #     logger.debug('%s expects value', option)
                        #     raise DocpieExit(DocpieException.usage_str)
                        if max(arg_range) == 1:
                            final_value = None
                        else:
                            assert max(arg_range) > 1
                            final_value = []
                    # ref is None
                    elif this_value is None:
                        final_value = 0 if each.repeat else False
                    else:
                        final_value = \
                            int(this_value) if each.repeat else this_value
            logger.debug('set %s value %s', names, final_value)
            # announce the same value under every alias of the option
            final = {}
            for name in names:
                final[name] = final_value
            self.update(final)
def _dashes_value(self, dashes):
    """Store the final value for the '--' separator key.

    options_first consumes one '--' itself, so the count is adjusted;
    auto2dashes reduces the value to a plain bool.
    """
    separator = self.get('--', dashes)
    if self.options_first:
        if separator is True:
            separator = False
        elif separator is not False and isinstance(separator, int):
            separator = max(0, separator - 1)
    if self.auto2dashes:
        separator = bool(separator)
    self['--'] = separator
def _prepare_token(self, argv):
    """Build a formalized Argv token stream from *argv*.

    argv may be None (falls back to ``sys.argv``) or a str (split on
    whitespace); argv[0] (the program name) is always dropped.
    Returns the Argv instance, or delegates to exception_handler
    (which raises) when formalizing reports an error.
    """
    if argv is None:
        argv = sys.argv
    elif isinstance(argv, StrType):
        argv = argv.split()
    # the things in extra may not be announced
    all_opt_requried_max_args = dict.fromkeys(self.extra, 0)
    all_opt_requried_max_args.update(self.opt_names_required_max_args)
    token = Argv(argv[1:], self.auto2dashes or self.options_first,
                 self.stdopt, self.attachopt, self.attachvalue,
                 all_opt_requried_max_args)
    none_or_error = token.formal(self.options_first)
    logger.debug('formal token: %s; error: %s', token, none_or_error)
    if none_or_error is not None:
        return self.exception_handler(none_or_error)
    return token
def _match(self, token):
    """Try each usage pattern in order against a clone of *token*.

    Returns ``(matched_usage, dashes)`` for the first usage that
    consumes the whole argv (a lone trailing '--' is tolerated when
    auto_dashes is on); raises ``DocpieExit(None)`` when none match.
    """
    for each in self.usages:
        logger.debug('matching usage %s', each)
        argv_clone = token.clone()
        if each.match(argv_clone, False):
            logger.debug('matched usage %s, checking rest argv %s',
                         each, argv_clone)
            # a match only counts if the argv was fully consumed
            if (not argv_clone or
                    (argv_clone.auto_dashes and
                     list(argv_clone) == ['--'])):
                argv_clone.check_dash()
                logger.debug('matched usage %s / %s', each, argv_clone)
                return each, argv_clone.dashes
            logger.debug('matching %s left %s, checking failed',
                         each, argv_clone)
        # partial matching may have mutated the pattern state: reset it
        each.reset()
        logger.debug('failed matching usage %s / %s', each, argv_clone)
    else:
        logger.debug('none matched')
        raise DocpieExit(None)
def check_flag_and_handler(self, token):
    """Scan *token* for auto-handled flags and fire their handlers.

    Handlers come from ``self.extra`` (e.g. -h/--help, -v/--version)
    and typically print-and-exit. Everything after a '--' separator is
    ignored when auto2dashes is on. Stacked short options (``-abc``)
    are expanded when stdopt allows, but scanning a stack stops at the
    first option that expects an argument (the rest is its value).
    """
    # option names that expect at least one argument
    need_arg = [name for name, expect in
                self.opt_names_required_max_args.items() if expect != 0]
    options = set()
    for ele in token:
        if self.auto2dashes and ele == '--':
            break
        if ele.startswith('-') and ele != '-':
            options.add(ele)
    for inputted in options:
        found = False
        for auto, handler in self.extra.items():
            if not callable(handler):
                continue
            if auto.startswith('--') and inputted.startswith('--'):
                logger.debug('check %s for %s', inputted, auto)
                # '--flag=value' counts as '--flag'
                if '=' in inputted:
                    inputted = inputted.split('=', 1)[0]
                if inputted == auto:
                    found = True
                    break
            elif auto[1] != '-' and inputted[1] != '-':
                logger.debug('check %s for %s', inputted, auto)
                if self.stdopt:
                    attachopt = self.attachopt
                    break_upper = False
                    for index, attached_name in enumerate(inputted[1:]):
                        # without attachopt only the first char is a flag
                        if not attachopt and index > 0:
                            break
                        logger.debug(
                            'check %s for %s', attached_name, auto
                        )
                        stacked_name = '-' + attached_name
                        if stacked_name == auto:
                            found = True
                            logger.debug('find %s in %s', auto, inputted)
                        # an arg-taking option ends the stack scan
                        if stacked_name in need_arg:
                            break_upper = True
                            break
                    if found or break_upper:  # break upper loop
                        break
                else:
                    found = (inputted == auto)
        if found:
            logger.debug('find %s, auto handle it', auto)
            handler(self, auto)
def exception_handler(self, error):
    """Re-raise *error* with the formatted help text attached.

    Builds the help message from the usage section plus any option
    sections, formats it per ``self.helpstyle`` ('python' / 'dedent' /
    raw), prefixes the error's original message when present, and
    raises a clone of the error carrying ``usage_text``,
    ``option_sections`` and ``msg`` attributes. Always raises.
    """
    logger.debug('handling %r', error)
    if self.option_sections:
        help_msg = ('%s\n\n%s' %
                    (self.usage_text.rstrip(),
                     '\n'.join(self.option_sections.values())))
    else:
        help_msg = self.usage_text
    helpstyle = self.helpstyle
    if helpstyle == 'python':
        if self.option_sections:  # option section will help dedent
            formated_help_msg = self.help_style_python(help_msg)
        else:  # only need to dedent it
            formated_help_msg = self.help_style_dedent(help_msg)
    elif helpstyle == 'dedent':
        formated_help_msg = self.help_style_dedent(help_msg)
    else:
        formated_help_msg = help_msg
    args = list(error.args)
    message = args[0]
    if message is not None:
        formated_help_msg = '%s\n\n%s' % (message, formated_help_msg)
    # remove `\n` because `raise` will auto add
    args[0] = formated_help_msg.rstrip()
    error = self.clone_exception(error, args)
    error.usage_text = self.usage_text
    error.option_sections = self.option_sections
    error.msg = message
    logger.debug('re-raise %r', error)
    raise error
@staticmethod
def clone_exception(error, args):
"""
return a new cloned error
when do:
```
try:
do_sth()
except BaseException as e:
handle(e)
def handle(error):
# do sth with error
raise e # <- won't work!
This can generate a new cloned error of the same class
Parameters
----------
error: the caught error
args: the new args to init the cloned error
Returns
-------
new error of the same class
"""
new_error = error.__class__(*args)
new_error.__dict__ = error.__dict__
return new_error
@staticmethod
@staticmethod
def help_style_python(docstring):
if not docstring:
return '\n'
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# indent = sys.maxint
indent = None
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
if indent is None:
indent = len(line) - len(stripped)
else:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
# if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed) + '\n'
@staticmethod
def help_style_dedent(docstring):
    """Trim *docstring* with textwrap.dedent (whitespace common to ALL
    non-blank lines, including the first, is removed)."""
    return textwrap.dedent(docstring)
@staticmethod
def version_handler(docpie, flag):
    """Default `-v` / `--version` handler: print the version and exit."""
    print(docpie.version)
    sys.exit()
# Because it's divided from dict
# json.dump(docpie, default=docpie.convert_2_dict) won't work
# so convert to dict before JSONlizing
def to_dict(self):  # cls, self):
    """Serialize this Docpie into a JSON-friendly dict.

    Typical use::

        pie = Docpie(__doc__)
        json.dumps(pie.convert_2_dict())

    Note the `extra` info will be lost if you customize that,
    because a function is not JSONlizable.
    You can use `set_config(extra={...})` to set it back.
    """
    # key insertion order is preserved so json output stays stable
    config = {
        'stdopt': self.stdopt,
        'attachopt': self.attachopt,
        'attachvalue': self.attachvalue,
        'auto2dashes': self.auto2dashes,
        'case_sensitive': self.case_sensitive,
        'namedoptions': self.namedoptions,
        'appearedonly': self.appeared_only,
        'optionsfirst': self.options_first,
        'option_name': self.option_name,
        'usage_name': self.usage_name,
        'name': self.name,
        'help': self.help,
        'version': self.version,
    }
    text = {
        'doc': self.doc,
        'usage_text': self.usage_text,
        'option_sections': self.option_sections,
    }
    serialized_options = {
        title: [convert_2_dict(opt) for opt in opts]
        for title, opts in self.options.items()
    }
    serialized_usages = [convert_2_dict(usage) for usage in self.usages]
    return {
        '__version__': self._version,
        '__class__': 'Docpie',
        '__config__': config,
        '__text__': text,
        'option': serialized_options,
        'usage': serialized_usages,
        'option_names': [list(names) for names in self.opt_names],
        'opt_names_required_max_args': self.opt_names_required_max_args,
    }
convert_2_dict = convert_to_dict = to_dict
@classmethod
def from_dict(cls, dic):
    """Rebuild a Docpie instance from a ``convert_2_dict`` dict.

    You can do this::

        pie = Docpie(__doc__)
        clone_pie = json.loads(pie.convert_2_docpie(
            json.dumps(pie.convert_2_dict())
        ))

    Note if you changed `extra`, it will be lost.
    You can use `set_config(extra={...})` to set it back.

    Raises ValueError for data produced by an older docpie version.
    """
    if '__version__' not in dic:
        raise ValueError('Not support old docpie data')
    # compare versions numerically by stripping the dots
    data_version = int(dic['__version__'].replace('.', ''))
    this_version = int(cls._version.replace('.', ''))
    logger.debug('this: %s, old: %s', this_version, data_version)
    if data_version < this_version:
        raise ValueError('Not support old docpie data')
    assert dic['__class__'] == 'Docpie'
    config = dic['__config__']
    help = config.pop('help')
    version = config.pop('version')
    option_name = config.pop('option_name')
    usage_name = config.pop('usage_name')
    # doc=None skips parsing; the parsed objects are restored below
    self = cls(None, **config)
    self.option_name = option_name
    self.usage_name = usage_name
    text = dic['__text__']
    self.doc = text['doc']
    self.usage_text = text['usage_text']
    self.option_sections = text['option_sections']
    self.opt_names = [set(x) for x in dic['option_names']]
    self.opt_names_required_max_args = dic['opt_names_required_max_args']
    self.set_config(help=help, version=version)
    self.options = o = {}
    for title, options in dic['option'].items():
        opt_ins = [convert_2_object(x, {}, self.namedoptions)
                   for x in options]
        o[title] = opt_ins
    self.usages = [convert_2_object(x, self.options, self.namedoptions)
                   for x in dic['usage']]
    return self
convert_2_docpie = convert_to_docpie = from_dict
def set_config(self, **config):
    """Update any subset of the parser configuration in place.

    Accepted keys: stdopt, attachopt, attachvalue, auto2dashes, name,
    help, version, case_sensitive, optionsfirst, appearedonly,
    namedoptions, extra. Any other key raises ValueError.

    Changing a key that affects parsing (stdopt / attachopt /
    attachvalue / name / case_sensitive / namedoptions) after the doc
    has been parsed re-runs ``_init`` (a warning is logged first).
    """
    reinit = False
    if 'stdopt' in config:
        stdopt = config.pop('stdopt')
        reinit = (stdopt != self.stdopt)
        self.stdopt = stdopt
    if 'attachopt' in config:
        attachopt = config.pop('attachopt')
        reinit = reinit or (attachopt != self.attachopt)
        self.attachopt = attachopt
    if 'attachvalue' in config:
        attachvalue = config.pop('attachvalue')
        reinit = reinit or (attachvalue != self.attachvalue)
        self.attachvalue = attachvalue
    if 'auto2dashes' in config:
        self.auto2dashes = config.pop('auto2dashes')
    if 'name' in config:
        name = config.pop('name')
        reinit = reinit or (name != self.name)
        self.name = name
    if 'help' in config:
        self.help = config.pop('help')
        # (un)register the default -h/--help auto handler
        self._set_or_remove_extra_handler(
            self.help, ('--help', '-h'), self.help_handler)
    if 'version' in config:
        self.version = config.pop('version')
        # (un)register the default -v/--version auto handler
        self._set_or_remove_extra_handler(
            self.version is not None,
            ('--version', '-v'),
            self.version_handler)
    if 'case_sensitive' in config:
        case_sensitive = config.pop('case_sensitive')
        reinit = reinit or (case_sensitive != self.case_sensitive)
        self.case_sensitive = case_sensitive
    if 'optionsfirst' in config:
        self.options_first = config.pop('optionsfirst')
    if 'appearedonly' in config:
        self.appeared_only = config.pop('appearedonly')
    if 'namedoptions' in config:
        namedoptions = config.pop('namedoptions')
        reinit = reinit or (namedoptions != self.namedoptions)
        self.namedoptions = namedoptions
    if 'extra' in config:
        self.extra.update(self._formal_extra(config.pop('extra')))
    if config:  # should be empty
        raise ValueError(
            '`%s` %s not accepted key argument%s' % (
                '`, `'.join(config),
                'is' if len(config) == 1 else 'are',
                '' if len(config) == 1 else 's'
            ))
    if self.doc is not None and reinit:
        logger.warning(
            'You changed the config that requires re-initialized'
            ' `Docpie` object. Create a new one instead'
        )
        self._init()
def _formal_extra(self, extra):
    """Expand ``{key_or_keys: handler}`` into a flat ``{key: handler}``.

    A key may be a single flag string or an iterable of flag strings;
    every flag ends up as its own entry pointing at the same handler.
    """
    flattened = {}
    for keys, handler in extra.items():
        key_group = [keys] if isinstance(keys, StrType) else keys
        for key in key_group:
            flattened[key] = handler
    return flattened
def _set_or_remove_extra_handler(self, set_handler, find_order, handler):
    """Register or unregister *handler* for a flag and its aliases.

    The first flag in *find_order* that is declared in the doc's
    Options section wins, and the handler is applied to all of its
    aliases; when none is declared, the raw flags in *find_order* are
    used directly.

    set_handler: truthy -> set the handler; falsy -> remove it.
    """
    for flag in find_order:
        alias = self.find_flag_alias(flag)
        if alias is not None:
            alias.add(flag)
            for each in alias:
                if set_handler:
                    logger.debug('set %s hanlder %s', each, handler)
                    self.extra[each] = handler
                else:
                    logger.debug('remove %s hanlder', each)
                    _hdlr = self.extra.pop(each, None)
                    logger.debug('%s handler %s removed', each, _hdlr)
            break
    else:
        # no declared flag found: operate on the raw names directly
        for flag in find_order:
            if set_handler:
                logger.debug('set %s hanlder', flag)
                self.extra[flag] = handler
            else:
                logger.debug('remove %s hanlder', flag)
                self.extra.pop(flag, None)
def find_flag_alias(self, flag):
    """Return the alias set of *flag* (the flag itself excluded).

    Returns None when *flag* is not declared in any "Options" section.
    """
    for names in self.opt_names:
        if flag in names:
            aliases = set(names)  # copy, don't mutate the declaration
            aliases.discard(flag)
            return aliases
    return None
def set_auto_handler(self, flag, handler):
    """Register a pre-auto-handler for *flag* and all of its aliases.

    *handler* must accept two arguments: the current Docpie instance
    and the flag found in argv. Unlike passing ``extra`` directly,
    aliases declared for the flag in the "Options" section get the
    same handler automatically.
    """
    assert flag.startswith('-') and flag not in ('-', '--')
    targets = {flag}
    aliases = self.find_flag_alias(flag)
    if aliases:
        targets.update(aliases)
    for name in targets:
        self.extra[name] = handler
def preview(self, stream=sys.stdout):
    """A quick preview of docpie: print every parsed object to *stream*.

    Dumps the raw sections, the str() and repr() of each usage and
    option instance, and the registered auto handlers. Debug aid only.
    """
    write = stream.write
    write(('[Quick preview of Docpie %s]' % self._version).center(80, '='))
    write('\n')
    write(' sections '.center(80, '-'))
    write('\n')
    write(self.usage_text)
    write('\n')
    option_sections = self.option_sections
    if option_sections:
        write('\n')
        write('\n'.join(option_sections.values()))
        write('\n')
    write(' str '.center(80, '-'))
    write('\n[%s]\n' % self.usage_name)
    for each in self.usages:
        write(' %s\n' % each)
    write('\n[Options:]\n\n')
    for title, sections in self.options.items():
        # untitled option sections use the bare option_name heading
        if title:
            full_title = '%s %s' % (title, self.option_name)
        else:
            full_title = self.option_name
        write(full_title)
        write('\n')
        for each in sections:
            write(' %s\n' % each)
        write('\n')
    write(' repr '.center(80, '-'))
    write('\n[%s]\n' % self.usage_name)
    for each in self.usages:
        write(' %r\n' % each)
    write('\n[Options:]\n\n')
    for title, sections in self.options.items():
        if title:
            full_title = '%s %s' % (title, self.option_name)
        else:
            full_title = self.option_name
        write(full_title)
        write('\n')
        for each in sections:
            write(' %r\n' % each)
        write('\n')
    write(' auto handlers '.center(80, '-'))
    write('\n')
    for key, value in self.extra.items():
        write('%s %s\n' % (key, value))
def __str__(self):
    """Render the matched result as a deterministic, sorted dict-like string."""
    pairs = ('%r: %r' % item for item in sorted(self.items()))
    return '{%s}' % ',\n '.join(pairs)
|
TylerTemp/docpie
|
docpie/pie.py
|
Docpie.to_dict
|
python
|
def to_dict(self): # cls, self):
config = {
'stdopt': self.stdopt,
'attachopt': self.attachopt,
'attachvalue': self.attachvalue,
'auto2dashes': self.auto2dashes,
'case_sensitive': self.case_sensitive,
'namedoptions': self.namedoptions,
'appearedonly': self.appeared_only,
'optionsfirst': self.options_first,
'option_name': self.option_name,
'usage_name': self.usage_name,
'name': self.name,
'help': self.help,
'version': self.version
}
text = {
'doc': self.doc,
'usage_text': self.usage_text,
'option_sections': self.option_sections,
}
# option = [convert_2_dict(x) for x in self.options]
option = {}
for title, options in self.options.items():
option[title] = [convert_2_dict(x) for x in options]
usage = [convert_2_dict(x) for x in self.usages]
return {
'__version__': self._version,
'__class__': 'Docpie',
'__config__': config,
'__text__': text,
'option': option,
'usage': usage,
'option_names': [list(x) for x in self.opt_names],
'opt_names_required_max_args': self.opt_names_required_max_args
}
|
Convert Docpie into a JSONlizable dict.
Use it in this way:
pie = Docpie(__doc__)
json.dumps(pie.convert_2_dict())
Note the `extra` info will be lost if you customize that,
because a function is not JSONlizable.
You can use `set_config(extra={...})` to set it back.
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/pie.py#L547-L597
| null |
class Docpie(dict):
# Docpie version
# it's not a good idea but it can avoid loop importing
_version = '0.4.2'
option_name = 'Options:'
usage_name = 'Usage:'
doc = None
case_sensitive = False
auto2dashes = True
name = None
help = True
helpstyle = 'python'
version = None
stdopt = True
attachopt = True
attachvalue = True
options_first = False
appeared_only = False
extra = {}
namedoptions = False
opt_names = []
opt_names_required_max_args = {}
def __init__(self, doc=None, help=True, version=None,
             stdopt=True, attachopt=True, attachvalue=True,
             helpstyle='python',
             auto2dashes=True, name=None, case_sensitive=False,
             optionsfirst=False, appearedonly=False, namedoptions=False,
             extra=None):
    """Parse *doc* (a docopt-style help string) into a ready matcher.

    doc=None defers parsing (used internally by ``from_dict``).
    help/version register the default -h/--help and -v/--version
    auto handlers; extra maps flags to custom pre-handlers.
    """
    super(Docpie, self).__init__()
    if case_sensitive:
        warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
                      'case insensitive')
    if extra is None:
        extra = {}
    else:
        extra = self._formal_extra(extra)
    # set config first
    self.set_config(
        stdopt=stdopt, attachopt=attachopt, attachvalue=attachvalue,
        auto2dashes=auto2dashes, name=name, case_sensitive=case_sensitive,
        optionsfirst=optionsfirst, appearedonly=appearedonly,
        namedoptions=namedoptions)
    self.help = help
    self.helpstyle = helpstyle
    self.version = version
    self.extra = extra
    if doc is not None:
        self.doc = doc
        self._init()
def _init(self):
    """Parse ``self.doc``: extract the usage and option sections, build
    usage/option instances, and record per-option max argument counts.

    Raises DocpieError when the usage section title is missing.
    """
    uparser = UsageParser(
        self.usage_name, self.case_sensitive,
        self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
    oparser = OptionParser(
        self.option_name, self.case_sensitive,
        self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
    uparser.parse_content(self.doc)
    self.usage_text = usage_text = uparser.raw_content
    # avoid usage contains "Options:" word
    if usage_text is None:
        assert self.usage_name.lower() not in self.doc.lower()
        raise DocpieError(
            'usage title %r not found in doc' % (self.usage_name,)
        )
    # option sections live in everything AROUND the usage section
    prefix, _, suffix = self.doc.partition(usage_text)
    oparser.parse(prefix + suffix)
    self.option_sections = oparser.raw_content
    self.options = oparser.instances
    uparser.parse(None, self.name, self.options)
    self.usages = uparser.instances
    # record how many arguments each option name can consume at most
    self.opt_names_required_max_args = {}
    for opt_ins in uparser.all_options:
        if opt_ins.ref:
            # max_arg = max(opt_ins.arg_range())
            max_arg = max(opt_ins.ref.arg_range())
        else:
            max_arg = 0
        for each_name in opt_ins.names:
            self.opt_names_required_max_args[each_name] = max_arg
    self.opt_names = []
    for options in self.options.values():
        for each_option in options:
            self.opt_names.append(each_option[0].names)
    # re-apply help/version/extra so auto handlers cover new aliases
    self.set_config(help=self.help,
                    version=self.version,
                    extra=dict(self.extra))
def docpie(self, argv=None):
"""match the argv for each usages, return dict.
if argv is None, it will use sys.argv instead.
if argv is str, it will call argv.split() first.
this function will check the options in self.extra and handle it first.
Which means it may not try to match any usages because of the checking.
"""
token = self._prepare_token(argv)
# check first, raise after
# so `-hwhatever` can trigger `-h` first
self.check_flag_and_handler(token)
if token.error is not None:
# raise DocpieExit('%s\n\n%s' % (token.error, help_msg))
self.exception_handler(token.error)
try:
result, dashed = self._match(token)
except DocpieExit as e:
self.exception_handler(e)
# if error is not None:
# self.exception_handler(error)
value = result.get_value(self.appeared_only, False)
self.clear()
self.update(value)
if self.appeared_only:
self._drop_non_appeared()
logger.debug('get all matched value %s', self)
rest = list(self.usages) # a copy
rest.remove(result)
self._add_rest_value(rest)
logger.debug('merged rest values, now %s', self)
self._add_option_value()
self._dashes_value(dashed)
return dict(self) # remove all other reference in this instance
def _drop_non_appeared(self):
for key, _ in filter(lambda k_v: k_v[1] == -1, dict(self).items()):
self.pop(key)
def _add_rest_value(self, rest):
for each in rest:
default_values = each.get_sys_default_value(
self.appeared_only, False)
logger.debug('get rest values %s -> %s', each, default_values)
common_keys = set(self).intersection(default_values)
for key in common_keys:
default = default_values[key]
valued = self[key]
logger.debug('%s: default(%s), matched(%s)',
key, default, valued)
if ((default is not True and default is not False) and
isinstance(default, int)):
valued = int(valued)
elif isinstance(default, list):
if valued is None:
valued = []
elif isinstance(valued, list):
pass
else:
valued = [valued]
logger.debug('set %s as %s', key, valued)
default_values[key] = valued
self.update(default_values)
def _add_option_value(self):
# add left option, add default value
for options in self.options.values():
for each in options:
option = each[0]
names = option.names
default = option.default
this_value = option.value
logger.debug('%s/%s/%s', option, default, this_value)
name_in_value = names.intersection(self)
if name_in_value: # add default if necessary
one_name = name_in_value.pop()
logger.debug('in names, pop %s, self %s', one_name, self)
value_in_usage = self[one_name]
if not value_in_usage: # need default
if default is None: # no default, use old matched one
final_value = value_in_usage
elif (each.repeat or
(value_in_usage is not True and
value_in_usage is not False and
isinstance(value_in_usage, (int, list)))):
final_value = default.split()
else:
final_value = default
else:
final_value = value_in_usage
if option.ref is None and each.repeat:
final_value = int(final_value or 0)
# just add this key-value.
# Note all option here never been matched
elif self.appeared_only:
continue
else:
ref = option.ref
if default is not None:
if (each.repeat or
(this_value not in (True, False) and
isinstance(this_value, (int, list)))):
final_value = default.split()
else:
if ref is not None and max(ref.arg_range()) > 1:
final_value = default.split()
else:
final_value = default
else:
if ref is not None:
arg_range = ref.arg_range()
# if min(arg_range) != 0:
# # It requires at least a value
# logger.debug('%s expects value', option)
# raise DocpieExit(DocpieException.usage_str)
if max(arg_range) == 1:
final_value = None
else:
assert max(arg_range) > 1
final_value = []
# ref is None
elif this_value is None:
final_value = 0 if each.repeat else False
else:
final_value = \
int(this_value) if each.repeat else this_value
logger.debug('set %s value %s', names, final_value)
final = {}
for name in names:
final[name] = final_value
self.update(final)
def _dashes_value(self, dashes):
result = self['--'] if '--' in self else dashes
if self.options_first:
if result is True:
result = False
elif result is False:
pass
elif isinstance(result, int):
result = max(0, result - 1)
if self.auto2dashes:
result = bool(result)
self['--'] = result
def _prepare_token(self, argv):
if argv is None:
argv = sys.argv
elif isinstance(argv, StrType):
argv = argv.split()
# the things in extra may not be announced
all_opt_requried_max_args = dict.fromkeys(self.extra, 0)
all_opt_requried_max_args.update(self.opt_names_required_max_args)
token = Argv(argv[1:], self.auto2dashes or self.options_first,
self.stdopt, self.attachopt, self.attachvalue,
all_opt_requried_max_args)
none_or_error = token.formal(self.options_first)
logger.debug('formal token: %s; error: %s', token, none_or_error)
if none_or_error is not None:
return self.exception_handler(none_or_error)
return token
def _match(self, token):
for each in self.usages:
logger.debug('matching usage %s', each)
argv_clone = token.clone()
if each.match(argv_clone, False):
logger.debug('matched usage %s, checking rest argv %s',
each, argv_clone)
if (not argv_clone or
(argv_clone.auto_dashes and
list(argv_clone) == ['--'])):
argv_clone.check_dash()
logger.debug('matched usage %s / %s', each, argv_clone)
return each, argv_clone.dashes
logger.debug('matching %s left %s, checking failed',
each, argv_clone)
each.reset()
logger.debug('failed matching usage %s / %s', each, argv_clone)
else:
logger.debug('none matched')
raise DocpieExit(None)
def check_flag_and_handler(self, token):
need_arg = [name for name, expect in
self.opt_names_required_max_args.items() if expect != 0]
options = set()
for ele in token:
if self.auto2dashes and ele == '--':
break
if ele.startswith('-') and ele != '-':
options.add(ele)
for inputted in options:
found = False
for auto, handler in self.extra.items():
if not callable(handler):
continue
if auto.startswith('--') and inputted.startswith('--'):
logger.debug('check %s for %s', inputted, auto)
if '=' in inputted:
inputted = inputted.split('=', 1)[0]
if inputted == auto:
found = True
break
elif auto[1] != '-' and inputted[1] != '-':
logger.debug('check %s for %s', inputted, auto)
if self.stdopt:
attachopt = self.attachopt
break_upper = False
for index, attached_name in enumerate(inputted[1:]):
if not attachopt and index > 0:
break
logger.debug(
'check %s for %s', attached_name, auto
)
stacked_name = '-' + attached_name
if stacked_name == auto:
found = True
logger.debug('find %s in %s', auto, inputted)
if stacked_name in need_arg:
break_upper = True
break
if found or break_upper: # break upper loop
break
else:
found = (inputted == auto)
if found:
logger.debug('find %s, auto handle it', auto)
handler(self, auto)
def exception_handler(self, error):
logger.debug('handling %r', error)
if self.option_sections:
help_msg = ('%s\n\n%s' %
(self.usage_text.rstrip(),
'\n'.join(self.option_sections.values())))
else:
help_msg = self.usage_text
helpstyle = self.helpstyle
if helpstyle == 'python':
if self.option_sections: # option section will help dedent
formated_help_msg = self.help_style_python(help_msg)
else: # only need to dedent it
formated_help_msg = self.help_style_dedent(help_msg)
elif helpstyle == 'dedent':
formated_help_msg = self.help_style_dedent(help_msg)
else:
formated_help_msg = help_msg
args = list(error.args)
message = args[0]
if message is not None:
formated_help_msg = '%s\n\n%s' % (message, formated_help_msg)
# remove `\n` because `raise` will auto add
args[0] = formated_help_msg.rstrip()
error = self.clone_exception(error, args)
error.usage_text = self.usage_text
error.option_sections = self.option_sections
error.msg = message
logger.debug('re-raise %r', error)
raise error
@staticmethod
def clone_exception(error, args):
"""
return a new cloned error
when do:
```
try:
do_sth()
except BaseException as e:
handle(e)
def handle(error):
# do sth with error
raise e # <- won't work!
This can generate a new cloned error of the same class
Parameters
----------
error: the caught error
args: the new args to init the cloned error
Returns
-------
new error of the same class
"""
new_error = error.__class__(*args)
new_error.__dict__ = error.__dict__
return new_error
@staticmethod
def help_handler(docpie, flag):
    """Default help (`--help`, `-h`) handler: print help text and exit.

    when help = 'short_brief', a flag starting with `--` prints the
    full `doc`; a short flag prints only the "Usage" and "Option"
    sections.

    when help = 'short_brief_notice', same as above, but the short
    output ends with the notice
    "Use `--help` to see the full help message."

    otherwise (default), print the full `doc`.
    """
    help_type = docpie.help
    helpstyle = docpie.helpstyle
    if helpstyle == 'python':
        doc = Docpie.help_style_python(docpie.doc)
    elif helpstyle == 'dedent':
        doc = Docpie.help_style_dedent(docpie.doc)
    # elif help_style == 'raw':
    #     doc = Docpie.help_style_raw(docpie.doc)
    else:
        doc = docpie.doc
    if help_type == 'short_brief':
        if flag.startswith('--'):
            print(doc)
        else:
            print(docpie.usage_text.rstrip())
            option_sections = docpie.option_sections
            if option_sections:
                print('')
                print('\n'.join(option_sections.values()))
    elif help_type == 'short_brief_notice':
        if flag.startswith('--'):
            sys.stdout.write(doc)
        else:
            print(docpie.usage_text)
            option_sections = docpie.option_sections
            if option_sections:
                print('')
                print('\n'.join(option_sections.values()).rstrip())
            print('')
            # BUG FIX: user-facing string previously read "messsage"
            print('Use `--help` to see the full help message.')
    else:
        sys.stdout.write(doc)
    sys.exit()
@staticmethod
def help_style_python(docstring):
    """Trim *docstring* indentation PEP-257 style; the result always
    ends with exactly one newline."""
    if not docstring:
        return '\n'
    # Convert tabs to spaces (following the normal Python rules)
    # and split into a list of lines:
    lines = docstring.expandtabs().splitlines()
    # Determine minimum indentation (first line doesn't count):
    # indent = sys.maxint
    indent = None
    for line in lines[1:]:
        stripped = line.lstrip()
        if stripped:
            if indent is None:
                indent = len(line) - len(stripped)
            else:
                indent = min(indent, len(line) - len(stripped))
    # Remove indentation (first line is special):
    trimmed = [lines[0].strip()]
    # if indent < sys.maxint:
    # indent may be None when all later lines are blank;
    # line[None:] is then a harmless full-line slice
    for line in lines[1:]:
        trimmed.append(line[indent:].rstrip())
    # Strip off trailing and leading blank lines:
    while trimmed and not trimmed[-1]:
        trimmed.pop()
    while trimmed and not trimmed[0]:
        trimmed.pop(0)
    # Return a single string:
    return '\n'.join(trimmed) + '\n'
@staticmethod
def help_style_dedent(docstring):
return textwrap.dedent(docstring)
@staticmethod
def version_handler(docpie, flag):
    """Default `-v` / `--version` handler: print the version and exit."""
    print(docpie.version)
    sys.exit()
# Because it's divided from dict
# json.dump(docpie, default=docpie.convert_2_dict) won't work
# so convert to dict before JSONlizing
convert_2_dict = convert_to_dict = to_dict
@classmethod
def from_dict(cls, dic):
"""Convert dict generated by `convert_2_dict` into Docpie instance
You can do this:
pie = Docpie(__doc__)
clone_pie = json.loads(pie.convert_2_docpie(
json.dumps(pie.convert_2_dict())
))
Note if you changed `extra`, it will be lost.
You can use `set_config(extra={...})` to set it back.
"""
if '__version__' not in dic:
raise ValueError('Not support old docpie data')
data_version = int(dic['__version__'].replace('.', ''))
this_version = int(cls._version.replace('.', ''))
logger.debug('this: %s, old: %s', this_version, data_version)
if data_version < this_version:
raise ValueError('Not support old docpie data')
assert dic['__class__'] == 'Docpie'
config = dic['__config__']
help = config.pop('help')
version = config.pop('version')
option_name = config.pop('option_name')
usage_name = config.pop('usage_name')
self = cls(None, **config)
self.option_name = option_name
self.usage_name = usage_name
text = dic['__text__']
self.doc = text['doc']
self.usage_text = text['usage_text']
self.option_sections = text['option_sections']
self.opt_names = [set(x) for x in dic['option_names']]
self.opt_names_required_max_args = dic['opt_names_required_max_args']
self.set_config(help=help, version=version)
self.options = o = {}
for title, options in dic['option'].items():
opt_ins = [convert_2_object(x, {}, self.namedoptions)
for x in options]
o[title] = opt_ins
self.usages = [convert_2_object(x, self.options, self.namedoptions)
for x in dic['usage']]
return self
convert_2_docpie = convert_to_docpie = from_dict
def set_config(self, **config):
"""Shadow all the current config."""
reinit = False
if 'stdopt' in config:
stdopt = config.pop('stdopt')
reinit = (stdopt != self.stdopt)
self.stdopt = stdopt
if 'attachopt' in config:
attachopt = config.pop('attachopt')
reinit = reinit or (attachopt != self.attachopt)
self.attachopt = attachopt
if 'attachvalue' in config:
attachvalue = config.pop('attachvalue')
reinit = reinit or (attachvalue != self.attachvalue)
self.attachvalue = attachvalue
if 'auto2dashes' in config:
self.auto2dashes = config.pop('auto2dashes')
if 'name' in config:
name = config.pop('name')
reinit = reinit or (name != self.name)
self.name = name
if 'help' in config:
self.help = config.pop('help')
self._set_or_remove_extra_handler(
self.help, ('--help', '-h'), self.help_handler)
if 'version' in config:
self.version = config.pop('version')
self._set_or_remove_extra_handler(
self.version is not None,
('--version', '-v'),
self.version_handler)
if 'case_sensitive' in config:
case_sensitive = config.pop('case_sensitive')
reinit = reinit or (case_sensitive != self.case_sensitive)
self.case_sensitive = case_sensitive
if 'optionsfirst' in config:
self.options_first = config.pop('optionsfirst')
if 'appearedonly' in config:
self.appeared_only = config.pop('appearedonly')
if 'namedoptions' in config:
namedoptions = config.pop('namedoptions')
reinit = reinit or (namedoptions != self.namedoptions)
self.namedoptions = namedoptions
if 'extra' in config:
self.extra.update(self._formal_extra(config.pop('extra')))
if config: # should be empty
raise ValueError(
'`%s` %s not accepted key argument%s' % (
'`, `'.join(config),
'is' if len(config) == 1 else 'are',
'' if len(config) == 1 else 's'
))
if self.doc is not None and reinit:
logger.warning(
'You changed the config that requires re-initialized'
' `Docpie` object. Create a new one instead'
)
self._init()
def _formal_extra(self, extra):
result = {}
for keys, value in extra.items():
if isinstance(keys, StrType):
keys = [keys]
result.update((k, value) for k in keys)
return result
def _set_or_remove_extra_handler(self, set_handler, find_order, handler):
for flag in find_order:
alias = self.find_flag_alias(flag)
if alias is not None:
alias.add(flag)
for each in alias:
if set_handler:
logger.debug('set %s hanlder %s', each, handler)
self.extra[each] = handler
else:
logger.debug('remove %s hanlder', each)
_hdlr = self.extra.pop(each, None)
logger.debug('%s handler %s removed', each, _hdlr)
break
else:
for flag in find_order:
if set_handler:
logger.debug('set %s hanlder', flag)
self.extra[flag] = handler
else:
logger.debug('remove %s hanlder', flag)
self.extra.pop(flag, None)
def find_flag_alias(self, flag):
"""Return alias set of a flag; return None if flag is not defined in
"Options".
"""
for each in self.opt_names:
if flag in each:
result = set(each) # a copy
result.remove(flag)
return result
return None
def set_auto_handler(self, flag, handler):
"""Set pre-auto-handler for a flag.
the handler must accept two argument: first the `pie` which
referent to the current `Docpie` instance, second, the `flag`
which is the flag found in `argv`.
Different from `extra` argument, this will set the alias
option you defined in `Option` section with the same
behavior.
"""
assert flag.startswith('-') and flag not in ('-', '--')
alias = self.find_flag_alias(flag) or []
self.extra[flag] = handler
for each in alias:
self.extra[each] = handler
def preview(self, stream=sys.stdout):
"""A quick preview of docpie. Print all the parsed object"""
write = stream.write
write(('[Quick preview of Docpie %s]' % self._version).center(80, '='))
write('\n')
write(' sections '.center(80, '-'))
write('\n')
write(self.usage_text)
write('\n')
option_sections = self.option_sections
if option_sections:
write('\n')
write('\n'.join(option_sections.values()))
write('\n')
write(' str '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %s\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %s\n' % each)
write('\n')
write(' repr '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %r\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %r\n' % each)
write('\n')
write(' auto handlers '.center(80, '-'))
write('\n')
for key, value in self.extra.items():
write('%s %s\n' % (key, value))
def __str__(self):
return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
|
TylerTemp/docpie
|
docpie/pie.py
|
Docpie.from_dict
|
python
|
def from_dict(cls, dic):
if '__version__' not in dic:
raise ValueError('Not support old docpie data')
data_version = int(dic['__version__'].replace('.', ''))
this_version = int(cls._version.replace('.', ''))
logger.debug('this: %s, old: %s', this_version, data_version)
if data_version < this_version:
raise ValueError('Not support old docpie data')
assert dic['__class__'] == 'Docpie'
config = dic['__config__']
help = config.pop('help')
version = config.pop('version')
option_name = config.pop('option_name')
usage_name = config.pop('usage_name')
self = cls(None, **config)
self.option_name = option_name
self.usage_name = usage_name
text = dic['__text__']
self.doc = text['doc']
self.usage_text = text['usage_text']
self.option_sections = text['option_sections']
self.opt_names = [set(x) for x in dic['option_names']]
self.opt_names_required_max_args = dic['opt_names_required_max_args']
self.set_config(help=help, version=version)
self.options = o = {}
for title, options in dic['option'].items():
opt_ins = [convert_2_object(x, {}, self.namedoptions)
for x in options]
o[title] = opt_ins
self.usages = [convert_2_object(x, self.options, self.namedoptions)
for x in dic['usage']]
return self
|
Convert dict generated by `convert_2_dict` into Docpie instance
You can do this:
pie = Docpie(__doc__)
clone_pie = json.loads(pie.convert_2_docpie(
json.dumps(pie.convert_2_dict())
))
Note if you changed `extra`, it will be lost.
You can use `set_config(extra={...})` to set it back.
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/pie.py#L602-L651
| null |
class Docpie(dict):
# Docpie version
# it's not a good idea but it can avoid loop importing
_version = '0.4.2'
option_name = 'Options:'
usage_name = 'Usage:'
doc = None
case_sensitive = False
auto2dashes = True
name = None
help = True
helpstyle = 'python'
version = None
stdopt = True
attachopt = True
attachvalue = True
options_first = False
appeared_only = False
extra = {}
namedoptions = False
opt_names = []
opt_names_required_max_args = {}
def __init__(self, doc=None, help=True, version=None,
stdopt=True, attachopt=True, attachvalue=True,
helpstyle='python',
auto2dashes=True, name=None, case_sensitive=False,
optionsfirst=False, appearedonly=False, namedoptions=False,
extra=None):
super(Docpie, self).__init__()
if case_sensitive:
warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
'case insensitive')
if extra is None:
extra = {}
else:
extra = self._formal_extra(extra)
# set config first
self.set_config(
stdopt=stdopt, attachopt=attachopt, attachvalue=attachvalue,
auto2dashes=auto2dashes, name=name, case_sensitive=case_sensitive,
optionsfirst=optionsfirst, appearedonly=appearedonly,
namedoptions=namedoptions)
self.help = help
self.helpstyle = helpstyle
self.version = version
self.extra = extra
if doc is not None:
self.doc = doc
self._init()
def _init(self):
uparser = UsageParser(
self.usage_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
oparser = OptionParser(
self.option_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
uparser.parse_content(self.doc)
self.usage_text = usage_text = uparser.raw_content
# avoid usage contains "Options:" word
if usage_text is None:
assert self.usage_name.lower() not in self.doc.lower()
raise DocpieError(
'usage title %r not found in doc' % (self.usage_name,)
)
prefix, _, suffix = self.doc.partition(usage_text)
oparser.parse(prefix + suffix)
self.option_sections = oparser.raw_content
self.options = oparser.instances
uparser.parse(None, self.name, self.options)
self.usages = uparser.instances
self.opt_names_required_max_args = {}
for opt_ins in uparser.all_options:
if opt_ins.ref:
# max_arg = max(opt_ins.arg_range())
max_arg = max(opt_ins.ref.arg_range())
else:
max_arg = 0
for each_name in opt_ins.names:
self.opt_names_required_max_args[each_name] = max_arg
self.opt_names = []
for options in self.options.values():
for each_option in options:
self.opt_names.append(each_option[0].names)
self.set_config(help=self.help,
version=self.version,
extra=dict(self.extra))
def docpie(self, argv=None):
"""match the argv for each usages, return dict.
if argv is None, it will use sys.argv instead.
if argv is str, it will call argv.split() first.
this function will check the options in self.extra and handle it first.
Which means it may not try to match any usages because of the checking.
"""
token = self._prepare_token(argv)
# check first, raise after
# so `-hwhatever` can trigger `-h` first
self.check_flag_and_handler(token)
if token.error is not None:
# raise DocpieExit('%s\n\n%s' % (token.error, help_msg))
self.exception_handler(token.error)
try:
result, dashed = self._match(token)
except DocpieExit as e:
self.exception_handler(e)
# if error is not None:
# self.exception_handler(error)
value = result.get_value(self.appeared_only, False)
self.clear()
self.update(value)
if self.appeared_only:
self._drop_non_appeared()
logger.debug('get all matched value %s', self)
rest = list(self.usages) # a copy
rest.remove(result)
self._add_rest_value(rest)
logger.debug('merged rest values, now %s', self)
self._add_option_value()
self._dashes_value(dashed)
return dict(self) # remove all other reference in this instance
def _drop_non_appeared(self):
for key, _ in filter(lambda k_v: k_v[1] == -1, dict(self).items()):
self.pop(key)
def _add_rest_value(self, rest):
for each in rest:
default_values = each.get_sys_default_value(
self.appeared_only, False)
logger.debug('get rest values %s -> %s', each, default_values)
common_keys = set(self).intersection(default_values)
for key in common_keys:
default = default_values[key]
valued = self[key]
logger.debug('%s: default(%s), matched(%s)',
key, default, valued)
if ((default is not True and default is not False) and
isinstance(default, int)):
valued = int(valued)
elif isinstance(default, list):
if valued is None:
valued = []
elif isinstance(valued, list):
pass
else:
valued = [valued]
logger.debug('set %s as %s', key, valued)
default_values[key] = valued
self.update(default_values)
def _add_option_value(self):
# add left option, add default value
for options in self.options.values():
for each in options:
option = each[0]
names = option.names
default = option.default
this_value = option.value
logger.debug('%s/%s/%s', option, default, this_value)
name_in_value = names.intersection(self)
if name_in_value: # add default if necessary
one_name = name_in_value.pop()
logger.debug('in names, pop %s, self %s', one_name, self)
value_in_usage = self[one_name]
if not value_in_usage: # need default
if default is None: # no default, use old matched one
final_value = value_in_usage
elif (each.repeat or
(value_in_usage is not True and
value_in_usage is not False and
isinstance(value_in_usage, (int, list)))):
final_value = default.split()
else:
final_value = default
else:
final_value = value_in_usage
if option.ref is None and each.repeat:
final_value = int(final_value or 0)
# just add this key-value.
# Note all option here never been matched
elif self.appeared_only:
continue
else:
ref = option.ref
if default is not None:
if (each.repeat or
(this_value not in (True, False) and
isinstance(this_value, (int, list)))):
final_value = default.split()
else:
if ref is not None and max(ref.arg_range()) > 1:
final_value = default.split()
else:
final_value = default
else:
if ref is not None:
arg_range = ref.arg_range()
# if min(arg_range) != 0:
# # It requires at least a value
# logger.debug('%s expects value', option)
# raise DocpieExit(DocpieException.usage_str)
if max(arg_range) == 1:
final_value = None
else:
assert max(arg_range) > 1
final_value = []
# ref is None
elif this_value is None:
final_value = 0 if each.repeat else False
else:
final_value = \
int(this_value) if each.repeat else this_value
logger.debug('set %s value %s', names, final_value)
final = {}
for name in names:
final[name] = final_value
self.update(final)
def _dashes_value(self, dashes):
result = self['--'] if '--' in self else dashes
if self.options_first:
if result is True:
result = False
elif result is False:
pass
elif isinstance(result, int):
result = max(0, result - 1)
if self.auto2dashes:
result = bool(result)
self['--'] = result
def _prepare_token(self, argv):
if argv is None:
argv = sys.argv
elif isinstance(argv, StrType):
argv = argv.split()
# the things in extra may not be announced
all_opt_requried_max_args = dict.fromkeys(self.extra, 0)
all_opt_requried_max_args.update(self.opt_names_required_max_args)
token = Argv(argv[1:], self.auto2dashes or self.options_first,
self.stdopt, self.attachopt, self.attachvalue,
all_opt_requried_max_args)
none_or_error = token.formal(self.options_first)
logger.debug('formal token: %s; error: %s', token, none_or_error)
if none_or_error is not None:
return self.exception_handler(none_or_error)
return token
def _match(self, token):
for each in self.usages:
logger.debug('matching usage %s', each)
argv_clone = token.clone()
if each.match(argv_clone, False):
logger.debug('matched usage %s, checking rest argv %s',
each, argv_clone)
if (not argv_clone or
(argv_clone.auto_dashes and
list(argv_clone) == ['--'])):
argv_clone.check_dash()
logger.debug('matched usage %s / %s', each, argv_clone)
return each, argv_clone.dashes
logger.debug('matching %s left %s, checking failed',
each, argv_clone)
each.reset()
logger.debug('failed matching usage %s / %s', each, argv_clone)
else:
logger.debug('none matched')
raise DocpieExit(None)
def check_flag_and_handler(self, token):
need_arg = [name for name, expect in
self.opt_names_required_max_args.items() if expect != 0]
options = set()
for ele in token:
if self.auto2dashes and ele == '--':
break
if ele.startswith('-') and ele != '-':
options.add(ele)
for inputted in options:
found = False
for auto, handler in self.extra.items():
if not callable(handler):
continue
if auto.startswith('--') and inputted.startswith('--'):
logger.debug('check %s for %s', inputted, auto)
if '=' in inputted:
inputted = inputted.split('=', 1)[0]
if inputted == auto:
found = True
break
elif auto[1] != '-' and inputted[1] != '-':
logger.debug('check %s for %s', inputted, auto)
if self.stdopt:
attachopt = self.attachopt
break_upper = False
for index, attached_name in enumerate(inputted[1:]):
if not attachopt and index > 0:
break
logger.debug(
'check %s for %s', attached_name, auto
)
stacked_name = '-' + attached_name
if stacked_name == auto:
found = True
logger.debug('find %s in %s', auto, inputted)
if stacked_name in need_arg:
break_upper = True
break
if found or break_upper: # break upper loop
break
else:
found = (inputted == auto)
if found:
logger.debug('find %s, auto handle it', auto)
handler(self, auto)
def exception_handler(self, error):
logger.debug('handling %r', error)
if self.option_sections:
help_msg = ('%s\n\n%s' %
(self.usage_text.rstrip(),
'\n'.join(self.option_sections.values())))
else:
help_msg = self.usage_text
helpstyle = self.helpstyle
if helpstyle == 'python':
if self.option_sections: # option section will help dedent
formated_help_msg = self.help_style_python(help_msg)
else: # only need to dedent it
formated_help_msg = self.help_style_dedent(help_msg)
elif helpstyle == 'dedent':
formated_help_msg = self.help_style_dedent(help_msg)
else:
formated_help_msg = help_msg
args = list(error.args)
message = args[0]
if message is not None:
formated_help_msg = '%s\n\n%s' % (message, formated_help_msg)
# remove `\n` because `raise` will auto add
args[0] = formated_help_msg.rstrip()
error = self.clone_exception(error, args)
error.usage_text = self.usage_text
error.option_sections = self.option_sections
error.msg = message
logger.debug('re-raise %r', error)
raise error
@staticmethod
def clone_exception(error, args):
"""
return a new cloned error
when do:
```
try:
do_sth()
except BaseException as e:
handle(e)
def handle(error):
# do sth with error
raise e # <- won't work!
This can generate a new cloned error of the same class
Parameters
----------
error: the caught error
args: the new args to init the cloned error
Returns
-------
new error of the same class
"""
new_error = error.__class__(*args)
new_error.__dict__ = error.__dict__
return new_error
@staticmethod
def help_handler(docpie, flag):
"""Default help(`--help`, `-h`) handler. print help string and exit.
when help = 'short_brief', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only.
when help = 'short_brief_notice', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only,
with a message.
"Use `--help` to see the full help messsage" in the end
otherwith(default), print the full `doc`
"""
help_type = docpie.help
helpstyle = docpie.helpstyle
if helpstyle == 'python':
doc = Docpie.help_style_python(docpie.doc)
elif helpstyle == 'dedent':
doc = Docpie.help_style_dedent(docpie.doc)
# elif help_style == 'raw':
# doc = Docpie.help_style_raw(docpie.doc)
else:
doc = docpie.doc
if help_type == 'short_brief':
if flag.startswith('--'):
print(doc)
else:
print(docpie.usage_text.rstrip())
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()))
elif help_type == 'short_brief_notice':
if flag.startswith('--'):
sys.stdout.write(doc)
else:
print(docpie.usage_text)
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()).rstrip())
print('')
print('Use `--help` to see the full help messsage.')
else:
sys.stdout.write(doc)
sys.exit()
@staticmethod
def help_style_python(docstring):
if not docstring:
return '\n'
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# indent = sys.maxint
indent = None
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
if indent is None:
indent = len(line) - len(stripped)
else:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
# if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed) + '\n'
@staticmethod
def help_style_dedent(docstring):
return textwrap.dedent(docstring)
@staticmethod
def version_handler(docpie, flag):
"""Default `-v` and `--version` handler. print the verison and exit."""
print(docpie.version)
sys.exit()
# Because it's divided from dict
# json.dump(docpie, default=docpie.convert_2_dict) won't work
# so convert to dict before JSONlizing
def to_dict(self): # cls, self):
"""Convert Docpie into a JSONlizable dict.
Use it in this way:
pie = Docpie(__doc__)
json.dumps(pie.convert_2_dict())
Note the `extra` info will be lost if you costomize that,
because a function is not JSONlizable.
You can use `set_config(extra={...})` to set it back.
"""
config = {
'stdopt': self.stdopt,
'attachopt': self.attachopt,
'attachvalue': self.attachvalue,
'auto2dashes': self.auto2dashes,
'case_sensitive': self.case_sensitive,
'namedoptions': self.namedoptions,
'appearedonly': self.appeared_only,
'optionsfirst': self.options_first,
'option_name': self.option_name,
'usage_name': self.usage_name,
'name': self.name,
'help': self.help,
'version': self.version
}
text = {
'doc': self.doc,
'usage_text': self.usage_text,
'option_sections': self.option_sections,
}
# option = [convert_2_dict(x) for x in self.options]
option = {}
for title, options in self.options.items():
option[title] = [convert_2_dict(x) for x in options]
usage = [convert_2_dict(x) for x in self.usages]
return {
'__version__': self._version,
'__class__': 'Docpie',
'__config__': config,
'__text__': text,
'option': option,
'usage': usage,
'option_names': [list(x) for x in self.opt_names],
'opt_names_required_max_args': self.opt_names_required_max_args
}
convert_2_dict = convert_to_dict = to_dict
@classmethod
convert_2_docpie = convert_to_docpie = from_dict
def set_config(self, **config):
"""Shadow all the current config."""
reinit = False
if 'stdopt' in config:
stdopt = config.pop('stdopt')
reinit = (stdopt != self.stdopt)
self.stdopt = stdopt
if 'attachopt' in config:
attachopt = config.pop('attachopt')
reinit = reinit or (attachopt != self.attachopt)
self.attachopt = attachopt
if 'attachvalue' in config:
attachvalue = config.pop('attachvalue')
reinit = reinit or (attachvalue != self.attachvalue)
self.attachvalue = attachvalue
if 'auto2dashes' in config:
self.auto2dashes = config.pop('auto2dashes')
if 'name' in config:
name = config.pop('name')
reinit = reinit or (name != self.name)
self.name = name
if 'help' in config:
self.help = config.pop('help')
self._set_or_remove_extra_handler(
self.help, ('--help', '-h'), self.help_handler)
if 'version' in config:
self.version = config.pop('version')
self._set_or_remove_extra_handler(
self.version is not None,
('--version', '-v'),
self.version_handler)
if 'case_sensitive' in config:
case_sensitive = config.pop('case_sensitive')
reinit = reinit or (case_sensitive != self.case_sensitive)
self.case_sensitive = case_sensitive
if 'optionsfirst' in config:
self.options_first = config.pop('optionsfirst')
if 'appearedonly' in config:
self.appeared_only = config.pop('appearedonly')
if 'namedoptions' in config:
namedoptions = config.pop('namedoptions')
reinit = reinit or (namedoptions != self.namedoptions)
self.namedoptions = namedoptions
if 'extra' in config:
self.extra.update(self._formal_extra(config.pop('extra')))
if config: # should be empty
raise ValueError(
'`%s` %s not accepted key argument%s' % (
'`, `'.join(config),
'is' if len(config) == 1 else 'are',
'' if len(config) == 1 else 's'
))
if self.doc is not None and reinit:
logger.warning(
'You changed the config that requires re-initialized'
' `Docpie` object. Create a new one instead'
)
self._init()
def _formal_extra(self, extra):
result = {}
for keys, value in extra.items():
if isinstance(keys, StrType):
keys = [keys]
result.update((k, value) for k in keys)
return result
def _set_or_remove_extra_handler(self, set_handler, find_order, handler):
for flag in find_order:
alias = self.find_flag_alias(flag)
if alias is not None:
alias.add(flag)
for each in alias:
if set_handler:
logger.debug('set %s hanlder %s', each, handler)
self.extra[each] = handler
else:
logger.debug('remove %s hanlder', each)
_hdlr = self.extra.pop(each, None)
logger.debug('%s handler %s removed', each, _hdlr)
break
else:
for flag in find_order:
if set_handler:
logger.debug('set %s hanlder', flag)
self.extra[flag] = handler
else:
logger.debug('remove %s hanlder', flag)
self.extra.pop(flag, None)
def find_flag_alias(self, flag):
"""Return alias set of a flag; return None if flag is not defined in
"Options".
"""
for each in self.opt_names:
if flag in each:
result = set(each) # a copy
result.remove(flag)
return result
return None
def set_auto_handler(self, flag, handler):
"""Set pre-auto-handler for a flag.
the handler must accept two argument: first the `pie` which
referent to the current `Docpie` instance, second, the `flag`
which is the flag found in `argv`.
Different from `extra` argument, this will set the alias
option you defined in `Option` section with the same
behavior.
"""
assert flag.startswith('-') and flag not in ('-', '--')
alias = self.find_flag_alias(flag) or []
self.extra[flag] = handler
for each in alias:
self.extra[each] = handler
def preview(self, stream=sys.stdout):
"""A quick preview of docpie. Print all the parsed object"""
write = stream.write
write(('[Quick preview of Docpie %s]' % self._version).center(80, '='))
write('\n')
write(' sections '.center(80, '-'))
write('\n')
write(self.usage_text)
write('\n')
option_sections = self.option_sections
if option_sections:
write('\n')
write('\n'.join(option_sections.values()))
write('\n')
write(' str '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %s\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %s\n' % each)
write('\n')
write(' repr '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %r\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %r\n' % each)
write('\n')
write(' auto handlers '.center(80, '-'))
write('\n')
for key, value in self.extra.items():
write('%s %s\n' % (key, value))
def __str__(self):
return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
|
TylerTemp/docpie
|
docpie/pie.py
|
Docpie.set_config
|
python
|
def set_config(self, **config):
reinit = False
if 'stdopt' in config:
stdopt = config.pop('stdopt')
reinit = (stdopt != self.stdopt)
self.stdopt = stdopt
if 'attachopt' in config:
attachopt = config.pop('attachopt')
reinit = reinit or (attachopt != self.attachopt)
self.attachopt = attachopt
if 'attachvalue' in config:
attachvalue = config.pop('attachvalue')
reinit = reinit or (attachvalue != self.attachvalue)
self.attachvalue = attachvalue
if 'auto2dashes' in config:
self.auto2dashes = config.pop('auto2dashes')
if 'name' in config:
name = config.pop('name')
reinit = reinit or (name != self.name)
self.name = name
if 'help' in config:
self.help = config.pop('help')
self._set_or_remove_extra_handler(
self.help, ('--help', '-h'), self.help_handler)
if 'version' in config:
self.version = config.pop('version')
self._set_or_remove_extra_handler(
self.version is not None,
('--version', '-v'),
self.version_handler)
if 'case_sensitive' in config:
case_sensitive = config.pop('case_sensitive')
reinit = reinit or (case_sensitive != self.case_sensitive)
self.case_sensitive = case_sensitive
if 'optionsfirst' in config:
self.options_first = config.pop('optionsfirst')
if 'appearedonly' in config:
self.appeared_only = config.pop('appearedonly')
if 'namedoptions' in config:
namedoptions = config.pop('namedoptions')
reinit = reinit or (namedoptions != self.namedoptions)
self.namedoptions = namedoptions
if 'extra' in config:
self.extra.update(self._formal_extra(config.pop('extra')))
if config: # should be empty
raise ValueError(
'`%s` %s not accepted key argument%s' % (
'`, `'.join(config),
'is' if len(config) == 1 else 'are',
'' if len(config) == 1 else 's'
))
if self.doc is not None and reinit:
logger.warning(
'You changed the config that requires re-initialized'
' `Docpie` object. Create a new one instead'
)
self._init()
|
Shadow all the current config.
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/pie.py#L655-L714
|
[
"def _init(self):\n uparser = UsageParser(\n self.usage_name, self.case_sensitive,\n self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)\n oparser = OptionParser(\n self.option_name, self.case_sensitive,\n self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)\n\n uparser.parse_content(self.doc)\n self.usage_text = usage_text = uparser.raw_content\n # avoid usage contains \"Options:\" word\n if usage_text is None:\n assert self.usage_name.lower() not in self.doc.lower()\n raise DocpieError(\n 'usage title %r not found in doc' % (self.usage_name,)\n )\n prefix, _, suffix = self.doc.partition(usage_text)\n\n oparser.parse(prefix + suffix)\n self.option_sections = oparser.raw_content\n self.options = oparser.instances\n\n uparser.parse(None, self.name, self.options)\n self.usages = uparser.instances\n\n self.opt_names_required_max_args = {}\n\n for opt_ins in uparser.all_options:\n if opt_ins.ref:\n # max_arg = max(opt_ins.arg_range())\n max_arg = max(opt_ins.ref.arg_range())\n else:\n max_arg = 0\n\n for each_name in opt_ins.names:\n self.opt_names_required_max_args[each_name] = max_arg\n\n self.opt_names = []\n for options in self.options.values():\n for each_option in options:\n self.opt_names.append(each_option[0].names)\n\n self.set_config(help=self.help,\n version=self.version,\n extra=dict(self.extra))\n",
"def _formal_extra(self, extra):\n result = {}\n for keys, value in extra.items():\n if isinstance(keys, StrType):\n keys = [keys]\n\n result.update((k, value) for k in keys)\n\n return result\n",
"def _set_or_remove_extra_handler(self, set_handler, find_order, handler):\n for flag in find_order:\n alias = self.find_flag_alias(flag)\n if alias is not None:\n alias.add(flag)\n for each in alias:\n if set_handler:\n logger.debug('set %s hanlder %s', each, handler)\n self.extra[each] = handler\n else:\n logger.debug('remove %s hanlder', each)\n _hdlr = self.extra.pop(each, None)\n logger.debug('%s handler %s removed', each, _hdlr)\n break\n"
] |
class Docpie(dict):
# Docpie version
# it's not a good idea but it can avoid loop importing
_version = '0.4.2'
option_name = 'Options:'
usage_name = 'Usage:'
doc = None
case_sensitive = False
auto2dashes = True
name = None
help = True
helpstyle = 'python'
version = None
stdopt = True
attachopt = True
attachvalue = True
options_first = False
appeared_only = False
extra = {}
namedoptions = False
opt_names = []
opt_names_required_max_args = {}
def __init__(self, doc=None, help=True, version=None,
stdopt=True, attachopt=True, attachvalue=True,
helpstyle='python',
auto2dashes=True, name=None, case_sensitive=False,
optionsfirst=False, appearedonly=False, namedoptions=False,
extra=None):
super(Docpie, self).__init__()
if case_sensitive:
warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
'case insensitive')
if extra is None:
extra = {}
else:
extra = self._formal_extra(extra)
# set config first
self.set_config(
stdopt=stdopt, attachopt=attachopt, attachvalue=attachvalue,
auto2dashes=auto2dashes, name=name, case_sensitive=case_sensitive,
optionsfirst=optionsfirst, appearedonly=appearedonly,
namedoptions=namedoptions)
self.help = help
self.helpstyle = helpstyle
self.version = version
self.extra = extra
if doc is not None:
self.doc = doc
self._init()
def _init(self):
uparser = UsageParser(
self.usage_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
oparser = OptionParser(
self.option_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
uparser.parse_content(self.doc)
self.usage_text = usage_text = uparser.raw_content
# avoid usage contains "Options:" word
if usage_text is None:
assert self.usage_name.lower() not in self.doc.lower()
raise DocpieError(
'usage title %r not found in doc' % (self.usage_name,)
)
prefix, _, suffix = self.doc.partition(usage_text)
oparser.parse(prefix + suffix)
self.option_sections = oparser.raw_content
self.options = oparser.instances
uparser.parse(None, self.name, self.options)
self.usages = uparser.instances
self.opt_names_required_max_args = {}
for opt_ins in uparser.all_options:
if opt_ins.ref:
# max_arg = max(opt_ins.arg_range())
max_arg = max(opt_ins.ref.arg_range())
else:
max_arg = 0
for each_name in opt_ins.names:
self.opt_names_required_max_args[each_name] = max_arg
self.opt_names = []
for options in self.options.values():
for each_option in options:
self.opt_names.append(each_option[0].names)
self.set_config(help=self.help,
version=self.version,
extra=dict(self.extra))
def docpie(self, argv=None):
    """match the argv for each usages, return dict.

    if argv is None, it will use sys.argv instead.
    if argv is str, it will call argv.split() first.
    this function will check the options in self.extra and handle it first.
    Which means it may not try to match any usages because of the checking.
    """
    token = self._prepare_token(argv)
    # check first, raise after
    # so `-hwhatever` can trigger `-h` first
    self.check_flag_and_handler(token)
    if token.error is not None:
        # raise DocpieExit('%s\n\n%s' % (token.error, help_msg))
        self.exception_handler(token.error)
    try:
        result, dashed = self._match(token)
    except DocpieExit as e:
        self.exception_handler(e)
    # if error is not None:
    #     self.exception_handler(error)
    # Load the matched usage's values into this dict (self IS the result).
    value = result.get_value(self.appeared_only, False)
    self.clear()
    self.update(value)
    if self.appeared_only:
        self._drop_non_appeared()
    logger.debug('get all matched value %s', self)
    # Merge default values contributed by the usages that did NOT match.
    rest = list(self.usages)  # a copy
    rest.remove(result)
    self._add_rest_value(rest)
    logger.debug('merged rest values, now %s', self)
    self._add_option_value()
    self._dashes_value(dashed)
    return dict(self)  # remove all other reference in this instance
def _drop_non_appeared(self):
for key, _ in filter(lambda k_v: k_v[1] == -1, dict(self).items()):
self.pop(key)
def _add_rest_value(self, rest):
    """Merge system default values from the unmatched usages in *rest*,
    coercing already-matched values to the shape implied by each default
    (non-bool int default -> count, list default -> list)."""
    for each in rest:
        default_values = each.get_sys_default_value(
            self.appeared_only, False)
        logger.debug('get rest values %s -> %s', each, default_values)
        common_keys = set(self).intersection(default_values)
        for key in common_keys:
            default = default_values[key]
            valued = self[key]
            logger.debug('%s: default(%s), matched(%s)',
                         key, default, valued)
            # Exclude bools explicitly: True/False are ints in Python.
            if ((default is not True and default is not False) and
                    isinstance(default, int)):
                valued = int(valued)
            elif isinstance(default, list):
                if valued is None:
                    valued = []
                elif isinstance(valued, list):
                    pass
                else:
                    valued = [valued]
            logger.debug('set %s as %s', key, valued)
            default_values[key] = valued
        self.update(default_values)
def _add_option_value(self):
    """Fill in values for every declared option the matched usage left
    unset, deriving the value shape (bool / count / list / string) from
    the option's default and its argument reference."""
    # add left option, add default value
    for options in self.options.values():
        for each in options:
            option = each[0]
            names = option.names
            default = option.default
            this_value = option.value
            logger.debug('%s/%s/%s', option, default, this_value)
            name_in_value = names.intersection(self)
            if name_in_value:  # add default if necessary
                one_name = name_in_value.pop()
                logger.debug('in names, pop %s, self %s', one_name, self)
                value_in_usage = self[one_name]
                if not value_in_usage:  # need default
                    if default is None:  # no default, use old matched one
                        final_value = value_in_usage
                    elif (each.repeat or
                            (value_in_usage is not True and
                             value_in_usage is not False and
                             isinstance(value_in_usage, (int, list)))):
                        # Repeatable / multi-value option: a default string
                        # stands for a whitespace-separated list of values.
                        final_value = default.split()
                    else:
                        final_value = default
                else:
                    final_value = value_in_usage
                if option.ref is None and each.repeat:
                    # Valueless repeatable flag: its value is a count.
                    final_value = int(final_value or 0)
            # just add this key-value.
            # Note all option here never been matched
            elif self.appeared_only:
                continue
            else:
                ref = option.ref
                if default is not None:
                    if (each.repeat or
                            (this_value not in (True, False) and
                             isinstance(this_value, (int, list)))):
                        final_value = default.split()
                    else:
                        if ref is not None and max(ref.arg_range()) > 1:
                            final_value = default.split()
                        else:
                            final_value = default
                else:
                    if ref is not None:
                        arg_range = ref.arg_range()
                        # if min(arg_range) != 0:
                        #     # It requires at least a value
                        #     logger.debug('%s expects value', option)
                        #     raise DocpieExit(DocpieException.usage_str)
                        if max(arg_range) == 1:
                            final_value = None
                        else:
                            assert max(arg_range) > 1
                            final_value = []
                    # ref is None
                    elif this_value is None:
                        final_value = 0 if each.repeat else False
                    else:
                        final_value = \
                            int(this_value) if each.repeat else this_value
            logger.debug('set %s value %s', names, final_value)
            # Publish the same value under every alias of the option.
            final = {}
            for name in names:
                final[name] = final_value
            self.update(final)
def _dashes_value(self, dashes):
result = self['--'] if '--' in self else dashes
if self.options_first:
if result is True:
result = False
elif result is False:
pass
elif isinstance(result, int):
result = max(0, result - 1)
if self.auto2dashes:
result = bool(result)
self['--'] = result
def _prepare_token(self, argv):
    """Normalize *argv* (None -> sys.argv, str -> str.split()) into a
    formalized Argv token stream ready for matching."""
    if argv is None:
        argv = sys.argv
    elif isinstance(argv, StrType):
        argv = argv.split()
    # the things in extra may not be announced
    all_opt_requried_max_args = dict.fromkeys(self.extra, 0)
    all_opt_requried_max_args.update(self.opt_names_required_max_args)
    # argv[0] is the program name; only the rest takes part in matching.
    token = Argv(argv[1:], self.auto2dashes or self.options_first,
                 self.stdopt, self.attachopt, self.attachvalue,
                 all_opt_requried_max_args)
    none_or_error = token.formal(self.options_first)
    logger.debug('formal token: %s; error: %s', token, none_or_error)
    if none_or_error is not None:
        # exception_handler re-raises; this return never yields a value.
        return self.exception_handler(none_or_error)
    return token
def _match(self, token):
    """Try each usage pattern against *token*.

    Returns:
        (matched usage, dashes state) for the first usage that consumes
        the whole argv (a lone trailing '--' is tolerated when
        auto-dashes is on).

    Raises:
        DocpieExit: when no usage matches.
    """
    for each in self.usages:
        logger.debug('matching usage %s', each)
        # Each attempt works on a fresh copy of the argv stream.
        argv_clone = token.clone()
        if each.match(argv_clone, False):
            logger.debug('matched usage %s, checking rest argv %s',
                         each, argv_clone)
            if (not argv_clone or
                    (argv_clone.auto_dashes and
                     list(argv_clone) == ['--'])):
                argv_clone.check_dash()
                logger.debug('matched usage %s / %s', each, argv_clone)
                return each, argv_clone.dashes
            logger.debug('matching %s left %s, checking failed',
                         each, argv_clone)
            # Partial match: undo its recorded values before trying next.
            each.reset()
            logger.debug('failed matching usage %s / %s', each, argv_clone)
    else:
        # for/else: the loop ran to completion without returning.
        logger.debug('none matched')
        raise DocpieExit(None)
def check_flag_and_handler(self, token):
    """Scan *token* for flags registered in ``self.extra`` (help/version
    etc.) and invoke their handlers before any usage matching happens.

    Scanning stops at '--' when auto-dashes is on; stacked short options
    are unpacked when stdopt is set, stopping at the first stacked char
    that itself expects an argument.
    """
    # Names of options that consume at least one argument.
    need_arg = [name for name, expect in
                self.opt_names_required_max_args.items() if expect != 0]
    options = set()
    for ele in token:
        if self.auto2dashes and ele == '--':
            break
        if ele.startswith('-') and ele != '-':
            options.add(ele)
    for inputted in options:
        found = False
        for auto, handler in self.extra.items():
            if not callable(handler):
                continue
            if auto.startswith('--') and inputted.startswith('--'):
                logger.debug('check %s for %s', inputted, auto)
                # '--flag=value' counts as '--flag'.
                if '=' in inputted:
                    inputted = inputted.split('=', 1)[0]
                if inputted == auto:
                    found = True
                    break
            elif auto[1] != '-' and inputted[1] != '-':
                logger.debug('check %s for %s', inputted, auto)
                if self.stdopt:
                    attachopt = self.attachopt
                    break_upper = False
                    # Walk the stacked short-option characters.
                    for index, attached_name in enumerate(inputted[1:]):
                        if not attachopt and index > 0:
                            break
                        logger.debug(
                            'check %s for %s', attached_name, auto
                        )
                        stacked_name = '-' + attached_name
                        if stacked_name == auto:
                            found = True
                            logger.debug('find %s in %s', auto, inputted)
                        if stacked_name in need_arg:
                            # The rest of the stack is this option's arg.
                            break_upper = True
                            break
                    if found or break_upper:  # break upper loop
                        break
                else:
                    found = (inputted == auto)
        if found:
            # `auto`/`handler` keep the values from the broken loop.
            logger.debug('find %s, auto handle it', auto)
            handler(self, auto)
def exception_handler(self, error):
    """Re-raise *error* with the formatted help text folded into its
    message and ``usage_text`` / ``option_sections`` / ``msg``
    attributes attached for callers that catch it."""
    logger.debug('handling %r', error)
    if self.option_sections:
        help_msg = ('%s\n\n%s' %
                    (self.usage_text.rstrip(),
                     '\n'.join(self.option_sections.values())))
    else:
        help_msg = self.usage_text
    helpstyle = self.helpstyle
    if helpstyle == 'python':
        if self.option_sections:  # option section will help dedent
            formated_help_msg = self.help_style_python(help_msg)
        else:  # only need to dedent it
            formated_help_msg = self.help_style_dedent(help_msg)
    elif helpstyle == 'dedent':
        formated_help_msg = self.help_style_dedent(help_msg)
    else:
        formated_help_msg = help_msg
    args = list(error.args)
    message = args[0]
    if message is not None:
        formated_help_msg = '%s\n\n%s' % (message, formated_help_msg)
    # remove `\n` because `raise` will auto add
    args[0] = formated_help_msg.rstrip()
    error = self.clone_exception(error, args)
    error.usage_text = self.usage_text
    error.option_sections = self.option_sections
    error.msg = message
    logger.debug('re-raise %r', error)
    raise error
@staticmethod
def clone_exception(error, args):
    """Return a new error of the same class, built from *args* but
    carrying the original error's attributes.

    Needed because a caught exception cannot be re-raised by name from
    a helper function::

        try:
            do_sth()
        except BaseException as e:
            handle(e)

        def handle(error):
            # do sth with error
            raise e  # <- won't work!

    Parameters
    ----------
    error: the caught error
    args: the new args to init the cloned error

    Returns
    -------
    new error of the same class
    """
    new_error = error.__class__(*args)
    # NOTE(review): this shares (not copies) the original's __dict__,
    # so later attribute writes affect both objects — confirm intended.
    new_error.__dict__ = error.__dict__
    return new_error
@staticmethod
def help_handler(docpie, flag):
"""Default help(`--help`, `-h`) handler. print help string and exit.
when help = 'short_brief', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only.
when help = 'short_brief_notice', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only,
with a message.
"Use `--help` to see the full help messsage" in the end
otherwith(default), print the full `doc`
"""
help_type = docpie.help
helpstyle = docpie.helpstyle
if helpstyle == 'python':
doc = Docpie.help_style_python(docpie.doc)
elif helpstyle == 'dedent':
doc = Docpie.help_style_dedent(docpie.doc)
# elif help_style == 'raw':
# doc = Docpie.help_style_raw(docpie.doc)
else:
doc = docpie.doc
if help_type == 'short_brief':
if flag.startswith('--'):
print(doc)
else:
print(docpie.usage_text.rstrip())
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()))
elif help_type == 'short_brief_notice':
if flag.startswith('--'):
sys.stdout.write(doc)
else:
print(docpie.usage_text)
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()).rstrip())
print('')
print('Use `--help` to see the full help messsage.')
else:
sys.stdout.write(doc)
sys.exit()
@staticmethod
def help_style_python(docstring):
if not docstring:
return '\n'
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# indent = sys.maxint
indent = None
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
if indent is None:
indent = len(line) - len(stripped)
else:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
# if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed) + '\n'
@staticmethod
def help_style_dedent(docstring):
return textwrap.dedent(docstring)
@staticmethod
def version_handler(docpie, flag):
    """Default `-v` and `--version` handler: print the version and exit."""
    print(docpie.version)
    sys.exit()
# Because it's divided from dict
# json.dump(docpie, default=docpie.convert_2_dict) won't work
# so convert to dict before JSONlizing
def to_dict(self):  # cls, self):
    """Convert Docpie into a JSONlizable dict.

    Use it in this way:
    pie = Docpie(__doc__)
    json.dumps(pie.convert_2_dict())

    Note the `extra` info will be lost if you customize that,
    because a function is not JSONlizable.
    You can use `set_config(extra={...})` to set it back.
    """
    config = {
        'stdopt': self.stdopt,
        'attachopt': self.attachopt,
        'attachvalue': self.attachvalue,
        'auto2dashes': self.auto2dashes,
        'case_sensitive': self.case_sensitive,
        'namedoptions': self.namedoptions,
        'appearedonly': self.appeared_only,
        'optionsfirst': self.options_first,
        'option_name': self.option_name,
        'usage_name': self.usage_name,
        'name': self.name,
        'help': self.help,
        'version': self.version
    }
    text = {
        'doc': self.doc,
        'usage_text': self.usage_text,
        'option_sections': self.option_sections,
    }
    # option = [convert_2_dict(x) for x in self.options]
    # Serialize the options per section title, then every usage pattern.
    option = {}
    for title, options in self.options.items():
        option[title] = [convert_2_dict(x) for x in options]
    usage = [convert_2_dict(x) for x in self.usages]
    return {
        '__version__': self._version,
        '__class__': 'Docpie',
        '__config__': config,
        '__text__': text,
        'option': option,
        'usage': usage,
        'option_names': [list(x) for x in self.opt_names],
        'opt_names_required_max_args': self.opt_names_required_max_args
    }

convert_2_dict = convert_to_dict = to_dict
@classmethod
def from_dict(cls, dic):
    """Convert dict generated by `convert_2_dict` into Docpie instance

    You can do this:
    pie = Docpie(__doc__)
    clone_pie = json.loads(pie.convert_2_docpie(
        json.dumps(pie.convert_2_dict())
    ))

    Note if you changed `extra`, it will be lost.
    You can use `set_config(extra={...})` to set it back.

    Raises:
        ValueError: when the data has no `__version__` or was produced
        by an older, incompatible docpie.
    """
    if '__version__' not in dic:
        raise ValueError('Not support old docpie data')
    # Compare versions numerically after stripping the dots.
    data_version = int(dic['__version__'].replace('.', ''))
    this_version = int(cls._version.replace('.', ''))
    logger.debug('this: %s, old: %s', this_version, data_version)
    if data_version < this_version:
        raise ValueError('Not support old docpie data')
    assert dic['__class__'] == 'Docpie'
    config = dic['__config__']
    help = config.pop('help')
    version = config.pop('version')
    option_name = config.pop('option_name')
    usage_name = config.pop('usage_name')
    # Build an empty instance (doc=None skips parsing), then restore
    # the serialized state onto it.
    self = cls(None, **config)
    self.option_name = option_name
    self.usage_name = usage_name
    text = dic['__text__']
    self.doc = text['doc']
    self.usage_text = text['usage_text']
    self.option_sections = text['option_sections']
    self.opt_names = [set(x) for x in dic['option_names']]
    self.opt_names_required_max_args = dic['opt_names_required_max_args']
    self.set_config(help=help, version=version)
    self.options = o = {}
    for title, options in dic['option'].items():
        opt_ins = [convert_2_object(x, {}, self.namedoptions)
                   for x in options]
        o[title] = opt_ins
    self.usages = [convert_2_object(x, self.options, self.namedoptions)
                   for x in dic['usage']]
    return self

convert_2_docpie = convert_to_docpie = from_dict
def _formal_extra(self, extra):
    """Expand *extra* so each flag maps to its handler individually.

    A key may be a single flag string or an iterable of alias flags;
    the result maps every individual flag to the shared handler.
    """
    flattened = {}
    for flag_or_flags, shared_value in extra.items():
        if isinstance(flag_or_flags, StrType):
            flags = [flag_or_flags]
        else:
            flags = flag_or_flags
        for flag in flags:
            flattened[flag] = shared_value
    return flattened
def _set_or_remove_extra_handler(self, set_handler, find_order, handler):
    """Set or remove the auto-handler for the first flag in *find_order*
    that is declared in "Options" (applied to every alias of that flag);
    when none is declared, fall back to registering/removing each flag
    in *find_order* verbatim.

    Parameters
    ----------
    set_handler: True to register *handler*, False to remove it.
    find_order: flags to try, in priority order (e.g. ('--help', '-h')).
    handler: callable(docpie, flag) stored when set_handler is True.
    """
    for flag in find_order:
        alias = self.find_flag_alias(flag)
        if alias is not None:
            # The flag is declared: cover it together with its aliases.
            alias.add(flag)
            for each in alias:
                if set_handler:
                    # Fixed log-message typo: "hanlder" -> "handler".
                    logger.debug('set %s handler %s', each, handler)
                    self.extra[each] = handler
                else:
                    logger.debug('remove %s handler', each)
                    _hdlr = self.extra.pop(each, None)
                    logger.debug('%s handler %s removed', each, _hdlr)
            break
    else:
        # for/else: no flag declared in "Options" — use the raw flags.
        for flag in find_order:
            if set_handler:
                logger.debug('set %s handler', flag)
                self.extra[flag] = handler
            else:
                logger.debug('remove %s handler', flag)
                self.extra.pop(flag, None)
def find_flag_alias(self, flag):
"""Return alias set of a flag; return None if flag is not defined in
"Options".
"""
for each in self.opt_names:
if flag in each:
result = set(each) # a copy
result.remove(flag)
return result
return None
def set_auto_handler(self, flag, handler):
    """Set pre-auto-handler for a flag.

    the handler must accept two argument: first the `pie` which
    referent to the current `Docpie` instance, second, the `flag`
    which is the flag found in `argv`.

    Different from `extra` argument, this will set the alias
    option you defined in `Option` section with the same
    behavior.
    """
    # NOTE(review): assert-based validation is stripped under `python -O`.
    assert flag.startswith('-') and flag not in ('-', '--')
    # Register the handler for the flag and all its declared aliases.
    alias = self.find_flag_alias(flag) or []
    self.extra[flag] = handler
    for each in alias:
        self.extra[each] = handler
def preview(self, stream=sys.stdout):
    """A quick preview of docpie. Print all the parsed object"""
    write = stream.write
    write(('[Quick preview of Docpie %s]' % self._version).center(80, '='))
    write('\n')
    # Raw doc sections first.
    write(' sections '.center(80, '-'))
    write('\n')
    write(self.usage_text)
    write('\n')
    option_sections = self.option_sections
    if option_sections:
        write('\n')
        write('\n'.join(option_sections.values()))
        write('\n')
    # str() of every parsed usage and option.
    write(' str '.center(80, '-'))
    write('\n[%s]\n' % self.usage_name)
    for each in self.usages:
        write(' %s\n' % each)
    write('\n[Options:]\n\n')
    for title, sections in self.options.items():
        if title:
            full_title = '%s %s' % (title, self.option_name)
        else:
            full_title = self.option_name
        write(full_title)
        write('\n')
        for each in sections:
            write(' %s\n' % each)
        write('\n')
    # repr() of the same objects, useful for debugging.
    write(' repr '.center(80, '-'))
    write('\n[%s]\n' % self.usage_name)
    for each in self.usages:
        write(' %r\n' % each)
    write('\n[Options:]\n\n')
    for title, sections in self.options.items():
        if title:
            full_title = '%s %s' % (title, self.option_name)
        else:
            full_title = self.option_name
        write(full_title)
        write('\n')
        for each in sections:
            write(' %r\n' % each)
        write('\n')
    # Registered auto-handlers (help/version and customs).
    write(' auto handlers '.center(80, '-'))
    write('\n')
    for key, value in self.extra.items():
        write('%s %s\n' % (key, value))
def __str__(self):
return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
|
TylerTemp/docpie
|
docpie/pie.py
|
Docpie.find_flag_alias
|
python
|
def find_flag_alias(self, flag):
for each in self.opt_names:
if flag in each:
result = set(each) # a copy
result.remove(flag)
return result
return None
|
Return alias set of a flag; return None if flag is not defined in
"Options".
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/pie.py#L749-L758
| null |
class Docpie(dict):
    """Parse a docopt-style doc string and match argv against it.

    The instance itself is a dict holding the match result after
    :meth:`docpie` runs.
    """

    # Docpie version
    # it's not a good idea but it can avoid loop importing
    _version = '0.4.2'

    # Section titles looked up inside the doc string.
    option_name = 'Options:'
    usage_name = 'Usage:'

    doc = None
    case_sensitive = False     # deprecated; matching is case insensitive
    auto2dashes = True         # treat '--' as the options/args separator
    name = None                # program name; None means auto-detect
    help = True                # behavior of the auto `-h`/`--help` handler
    helpstyle = 'python'
    version = None             # printed by the default `--version` handler
    stdopt = True              # POSIX-style stacked short options
    attachopt = True
    attachvalue = True
    options_first = False
    appeared_only = False
    extra = {}                 # flag -> auto-handler callable
    namedoptions = False
    opt_names = []             # declared alias groups (sets of names)
    opt_names_required_max_args = {}  # option name -> max argument count
def __init__(self, doc=None, help=True, version=None,
             stdopt=True, attachopt=True, attachvalue=True,
             helpstyle='python',
             auto2dashes=True, name=None, case_sensitive=False,
             optionsfirst=False, appearedonly=False, namedoptions=False,
             extra=None):
    """Store the configuration and, when *doc* is given, parse it.

    Parameters mirror the class attributes; *extra* maps a flag (or an
    iterable of alias flags) to an auto-handler callable.
    """
    super(Docpie, self).__init__()
    if case_sensitive:
        warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
                      'case insensitive')
    if extra is None:
        extra = {}
    else:
        # Normalize alias-tuple keys into one entry per flag.
        extra = self._formal_extra(extra)
    # set config first
    self.set_config(
        stdopt=stdopt, attachopt=attachopt, attachvalue=attachvalue,
        auto2dashes=auto2dashes, name=name, case_sensitive=case_sensitive,
        optionsfirst=optionsfirst, appearedonly=appearedonly,
        namedoptions=namedoptions)
    self.help = help
    self.helpstyle = helpstyle
    self.version = version
    self.extra = extra
    if doc is not None:
        self.doc = doc
        self._init()
def _init(self):
uparser = UsageParser(
self.usage_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
oparser = OptionParser(
self.option_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
uparser.parse_content(self.doc)
self.usage_text = usage_text = uparser.raw_content
# avoid usage contains "Options:" word
if usage_text is None:
assert self.usage_name.lower() not in self.doc.lower()
raise DocpieError(
'usage title %r not found in doc' % (self.usage_name,)
)
prefix, _, suffix = self.doc.partition(usage_text)
oparser.parse(prefix + suffix)
self.option_sections = oparser.raw_content
self.options = oparser.instances
uparser.parse(None, self.name, self.options)
self.usages = uparser.instances
self.opt_names_required_max_args = {}
for opt_ins in uparser.all_options:
if opt_ins.ref:
# max_arg = max(opt_ins.arg_range())
max_arg = max(opt_ins.ref.arg_range())
else:
max_arg = 0
for each_name in opt_ins.names:
self.opt_names_required_max_args[each_name] = max_arg
self.opt_names = []
for options in self.options.values():
for each_option in options:
self.opt_names.append(each_option[0].names)
self.set_config(help=self.help,
version=self.version,
extra=dict(self.extra))
def docpie(self, argv=None):
"""match the argv for each usages, return dict.
if argv is None, it will use sys.argv instead.
if argv is str, it will call argv.split() first.
this function will check the options in self.extra and handle it first.
Which means it may not try to match any usages because of the checking.
"""
token = self._prepare_token(argv)
# check first, raise after
# so `-hwhatever` can trigger `-h` first
self.check_flag_and_handler(token)
if token.error is not None:
# raise DocpieExit('%s\n\n%s' % (token.error, help_msg))
self.exception_handler(token.error)
try:
result, dashed = self._match(token)
except DocpieExit as e:
self.exception_handler(e)
# if error is not None:
# self.exception_handler(error)
value = result.get_value(self.appeared_only, False)
self.clear()
self.update(value)
if self.appeared_only:
self._drop_non_appeared()
logger.debug('get all matched value %s', self)
rest = list(self.usages) # a copy
rest.remove(result)
self._add_rest_value(rest)
logger.debug('merged rest values, now %s', self)
self._add_option_value()
self._dashes_value(dashed)
return dict(self) # remove all other reference in this instance
def _drop_non_appeared(self):
for key, _ in filter(lambda k_v: k_v[1] == -1, dict(self).items()):
self.pop(key)
def _add_rest_value(self, rest):
for each in rest:
default_values = each.get_sys_default_value(
self.appeared_only, False)
logger.debug('get rest values %s -> %s', each, default_values)
common_keys = set(self).intersection(default_values)
for key in common_keys:
default = default_values[key]
valued = self[key]
logger.debug('%s: default(%s), matched(%s)',
key, default, valued)
if ((default is not True and default is not False) and
isinstance(default, int)):
valued = int(valued)
elif isinstance(default, list):
if valued is None:
valued = []
elif isinstance(valued, list):
pass
else:
valued = [valued]
logger.debug('set %s as %s', key, valued)
default_values[key] = valued
self.update(default_values)
def _add_option_value(self):
# add left option, add default value
for options in self.options.values():
for each in options:
option = each[0]
names = option.names
default = option.default
this_value = option.value
logger.debug('%s/%s/%s', option, default, this_value)
name_in_value = names.intersection(self)
if name_in_value: # add default if necessary
one_name = name_in_value.pop()
logger.debug('in names, pop %s, self %s', one_name, self)
value_in_usage = self[one_name]
if not value_in_usage: # need default
if default is None: # no default, use old matched one
final_value = value_in_usage
elif (each.repeat or
(value_in_usage is not True and
value_in_usage is not False and
isinstance(value_in_usage, (int, list)))):
final_value = default.split()
else:
final_value = default
else:
final_value = value_in_usage
if option.ref is None and each.repeat:
final_value = int(final_value or 0)
# just add this key-value.
# Note all option here never been matched
elif self.appeared_only:
continue
else:
ref = option.ref
if default is not None:
if (each.repeat or
(this_value not in (True, False) and
isinstance(this_value, (int, list)))):
final_value = default.split()
else:
if ref is not None and max(ref.arg_range()) > 1:
final_value = default.split()
else:
final_value = default
else:
if ref is not None:
arg_range = ref.arg_range()
# if min(arg_range) != 0:
# # It requires at least a value
# logger.debug('%s expects value', option)
# raise DocpieExit(DocpieException.usage_str)
if max(arg_range) == 1:
final_value = None
else:
assert max(arg_range) > 1
final_value = []
# ref is None
elif this_value is None:
final_value = 0 if each.repeat else False
else:
final_value = \
int(this_value) if each.repeat else this_value
logger.debug('set %s value %s', names, final_value)
final = {}
for name in names:
final[name] = final_value
self.update(final)
def _dashes_value(self, dashes):
result = self['--'] if '--' in self else dashes
if self.options_first:
if result is True:
result = False
elif result is False:
pass
elif isinstance(result, int):
result = max(0, result - 1)
if self.auto2dashes:
result = bool(result)
self['--'] = result
def _prepare_token(self, argv):
if argv is None:
argv = sys.argv
elif isinstance(argv, StrType):
argv = argv.split()
# the things in extra may not be announced
all_opt_requried_max_args = dict.fromkeys(self.extra, 0)
all_opt_requried_max_args.update(self.opt_names_required_max_args)
token = Argv(argv[1:], self.auto2dashes or self.options_first,
self.stdopt, self.attachopt, self.attachvalue,
all_opt_requried_max_args)
none_or_error = token.formal(self.options_first)
logger.debug('formal token: %s; error: %s', token, none_or_error)
if none_or_error is not None:
return self.exception_handler(none_or_error)
return token
def _match(self, token):
for each in self.usages:
logger.debug('matching usage %s', each)
argv_clone = token.clone()
if each.match(argv_clone, False):
logger.debug('matched usage %s, checking rest argv %s',
each, argv_clone)
if (not argv_clone or
(argv_clone.auto_dashes and
list(argv_clone) == ['--'])):
argv_clone.check_dash()
logger.debug('matched usage %s / %s', each, argv_clone)
return each, argv_clone.dashes
logger.debug('matching %s left %s, checking failed',
each, argv_clone)
each.reset()
logger.debug('failed matching usage %s / %s', each, argv_clone)
else:
logger.debug('none matched')
raise DocpieExit(None)
def check_flag_and_handler(self, token):
need_arg = [name for name, expect in
self.opt_names_required_max_args.items() if expect != 0]
options = set()
for ele in token:
if self.auto2dashes and ele == '--':
break
if ele.startswith('-') and ele != '-':
options.add(ele)
for inputted in options:
found = False
for auto, handler in self.extra.items():
if not callable(handler):
continue
if auto.startswith('--') and inputted.startswith('--'):
logger.debug('check %s for %s', inputted, auto)
if '=' in inputted:
inputted = inputted.split('=', 1)[0]
if inputted == auto:
found = True
break
elif auto[1] != '-' and inputted[1] != '-':
logger.debug('check %s for %s', inputted, auto)
if self.stdopt:
attachopt = self.attachopt
break_upper = False
for index, attached_name in enumerate(inputted[1:]):
if not attachopt and index > 0:
break
logger.debug(
'check %s for %s', attached_name, auto
)
stacked_name = '-' + attached_name
if stacked_name == auto:
found = True
logger.debug('find %s in %s', auto, inputted)
if stacked_name in need_arg:
break_upper = True
break
if found or break_upper: # break upper loop
break
else:
found = (inputted == auto)
if found:
logger.debug('find %s, auto handle it', auto)
handler(self, auto)
def exception_handler(self, error):
logger.debug('handling %r', error)
if self.option_sections:
help_msg = ('%s\n\n%s' %
(self.usage_text.rstrip(),
'\n'.join(self.option_sections.values())))
else:
help_msg = self.usage_text
helpstyle = self.helpstyle
if helpstyle == 'python':
if self.option_sections: # option section will help dedent
formated_help_msg = self.help_style_python(help_msg)
else: # only need to dedent it
formated_help_msg = self.help_style_dedent(help_msg)
elif helpstyle == 'dedent':
formated_help_msg = self.help_style_dedent(help_msg)
else:
formated_help_msg = help_msg
args = list(error.args)
message = args[0]
if message is not None:
formated_help_msg = '%s\n\n%s' % (message, formated_help_msg)
# remove `\n` because `raise` will auto add
args[0] = formated_help_msg.rstrip()
error = self.clone_exception(error, args)
error.usage_text = self.usage_text
error.option_sections = self.option_sections
error.msg = message
logger.debug('re-raise %r', error)
raise error
@staticmethod
def clone_exception(error, args):
"""
return a new cloned error
when do:
```
try:
do_sth()
except BaseException as e:
handle(e)
def handle(error):
# do sth with error
raise e # <- won't work!
This can generate a new cloned error of the same class
Parameters
----------
error: the caught error
args: the new args to init the cloned error
Returns
-------
new error of the same class
"""
new_error = error.__class__(*args)
new_error.__dict__ = error.__dict__
return new_error
@staticmethod
def help_handler(docpie, flag):
"""Default help(`--help`, `-h`) handler. print help string and exit.
when help = 'short_brief', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only.
when help = 'short_brief_notice', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only,
with a message.
"Use `--help` to see the full help messsage" in the end
otherwith(default), print the full `doc`
"""
help_type = docpie.help
helpstyle = docpie.helpstyle
if helpstyle == 'python':
doc = Docpie.help_style_python(docpie.doc)
elif helpstyle == 'dedent':
doc = Docpie.help_style_dedent(docpie.doc)
# elif help_style == 'raw':
# doc = Docpie.help_style_raw(docpie.doc)
else:
doc = docpie.doc
if help_type == 'short_brief':
if flag.startswith('--'):
print(doc)
else:
print(docpie.usage_text.rstrip())
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()))
elif help_type == 'short_brief_notice':
if flag.startswith('--'):
sys.stdout.write(doc)
else:
print(docpie.usage_text)
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()).rstrip())
print('')
print('Use `--help` to see the full help messsage.')
else:
sys.stdout.write(doc)
sys.exit()
@staticmethod
def help_style_python(docstring):
if not docstring:
return '\n'
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# indent = sys.maxint
indent = None
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
if indent is None:
indent = len(line) - len(stripped)
else:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
# if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed) + '\n'
@staticmethod
def help_style_dedent(docstring):
return textwrap.dedent(docstring)
@staticmethod
def version_handler(docpie, flag):
"""Default `-v` and `--version` handler. print the verison and exit."""
print(docpie.version)
sys.exit()
# Because it's divided from dict
# json.dump(docpie, default=docpie.convert_2_dict) won't work
# so convert to dict before JSONlizing
def to_dict(self): # cls, self):
"""Convert Docpie into a JSONlizable dict.
Use it in this way:
pie = Docpie(__doc__)
json.dumps(pie.convert_2_dict())
Note the `extra` info will be lost if you costomize that,
because a function is not JSONlizable.
You can use `set_config(extra={...})` to set it back.
"""
config = {
'stdopt': self.stdopt,
'attachopt': self.attachopt,
'attachvalue': self.attachvalue,
'auto2dashes': self.auto2dashes,
'case_sensitive': self.case_sensitive,
'namedoptions': self.namedoptions,
'appearedonly': self.appeared_only,
'optionsfirst': self.options_first,
'option_name': self.option_name,
'usage_name': self.usage_name,
'name': self.name,
'help': self.help,
'version': self.version
}
text = {
'doc': self.doc,
'usage_text': self.usage_text,
'option_sections': self.option_sections,
}
# option = [convert_2_dict(x) for x in self.options]
option = {}
for title, options in self.options.items():
option[title] = [convert_2_dict(x) for x in options]
usage = [convert_2_dict(x) for x in self.usages]
return {
'__version__': self._version,
'__class__': 'Docpie',
'__config__': config,
'__text__': text,
'option': option,
'usage': usage,
'option_names': [list(x) for x in self.opt_names],
'opt_names_required_max_args': self.opt_names_required_max_args
}
convert_2_dict = convert_to_dict = to_dict
@classmethod
def from_dict(cls, dic):
"""Convert dict generated by `convert_2_dict` into Docpie instance
You can do this:
pie = Docpie(__doc__)
clone_pie = json.loads(pie.convert_2_docpie(
json.dumps(pie.convert_2_dict())
))
Note if you changed `extra`, it will be lost.
You can use `set_config(extra={...})` to set it back.
"""
if '__version__' not in dic:
raise ValueError('Not support old docpie data')
data_version = int(dic['__version__'].replace('.', ''))
this_version = int(cls._version.replace('.', ''))
logger.debug('this: %s, old: %s', this_version, data_version)
if data_version < this_version:
raise ValueError('Not support old docpie data')
assert dic['__class__'] == 'Docpie'
config = dic['__config__']
help = config.pop('help')
version = config.pop('version')
option_name = config.pop('option_name')
usage_name = config.pop('usage_name')
self = cls(None, **config)
self.option_name = option_name
self.usage_name = usage_name
text = dic['__text__']
self.doc = text['doc']
self.usage_text = text['usage_text']
self.option_sections = text['option_sections']
self.opt_names = [set(x) for x in dic['option_names']]
self.opt_names_required_max_args = dic['opt_names_required_max_args']
self.set_config(help=help, version=version)
self.options = o = {}
for title, options in dic['option'].items():
opt_ins = [convert_2_object(x, {}, self.namedoptions)
for x in options]
o[title] = opt_ins
self.usages = [convert_2_object(x, self.options, self.namedoptions)
for x in dic['usage']]
return self
convert_2_docpie = convert_to_docpie = from_dict
def set_config(self, **config):
"""Shadow all the current config."""
reinit = False
if 'stdopt' in config:
stdopt = config.pop('stdopt')
reinit = (stdopt != self.stdopt)
self.stdopt = stdopt
if 'attachopt' in config:
attachopt = config.pop('attachopt')
reinit = reinit or (attachopt != self.attachopt)
self.attachopt = attachopt
if 'attachvalue' in config:
attachvalue = config.pop('attachvalue')
reinit = reinit or (attachvalue != self.attachvalue)
self.attachvalue = attachvalue
if 'auto2dashes' in config:
self.auto2dashes = config.pop('auto2dashes')
if 'name' in config:
name = config.pop('name')
reinit = reinit or (name != self.name)
self.name = name
if 'help' in config:
self.help = config.pop('help')
self._set_or_remove_extra_handler(
self.help, ('--help', '-h'), self.help_handler)
if 'version' in config:
self.version = config.pop('version')
self._set_or_remove_extra_handler(
self.version is not None,
('--version', '-v'),
self.version_handler)
if 'case_sensitive' in config:
case_sensitive = config.pop('case_sensitive')
reinit = reinit or (case_sensitive != self.case_sensitive)
self.case_sensitive = case_sensitive
if 'optionsfirst' in config:
self.options_first = config.pop('optionsfirst')
if 'appearedonly' in config:
self.appeared_only = config.pop('appearedonly')
if 'namedoptions' in config:
namedoptions = config.pop('namedoptions')
reinit = reinit or (namedoptions != self.namedoptions)
self.namedoptions = namedoptions
if 'extra' in config:
self.extra.update(self._formal_extra(config.pop('extra')))
if config: # should be empty
raise ValueError(
'`%s` %s not accepted key argument%s' % (
'`, `'.join(config),
'is' if len(config) == 1 else 'are',
'' if len(config) == 1 else 's'
))
if self.doc is not None and reinit:
logger.warning(
'You changed the config that requires re-initialized'
' `Docpie` object. Create a new one instead'
)
self._init()
def _formal_extra(self, extra):
result = {}
for keys, value in extra.items():
if isinstance(keys, StrType):
keys = [keys]
result.update((k, value) for k in keys)
return result
def _set_or_remove_extra_handler(self, set_handler, find_order, handler):
for flag in find_order:
alias = self.find_flag_alias(flag)
if alias is not None:
alias.add(flag)
for each in alias:
if set_handler:
logger.debug('set %s hanlder %s', each, handler)
self.extra[each] = handler
else:
logger.debug('remove %s hanlder', each)
_hdlr = self.extra.pop(each, None)
logger.debug('%s handler %s removed', each, _hdlr)
break
else:
for flag in find_order:
if set_handler:
logger.debug('set %s hanlder', flag)
self.extra[flag] = handler
else:
logger.debug('remove %s hanlder', flag)
self.extra.pop(flag, None)
def set_auto_handler(self, flag, handler):
"""Set pre-auto-handler for a flag.
the handler must accept two argument: first the `pie` which
referent to the current `Docpie` instance, second, the `flag`
which is the flag found in `argv`.
Different from `extra` argument, this will set the alias
option you defined in `Option` section with the same
behavior.
"""
assert flag.startswith('-') and flag not in ('-', '--')
alias = self.find_flag_alias(flag) or []
self.extra[flag] = handler
for each in alias:
self.extra[each] = handler
def preview(self, stream=sys.stdout):
"""A quick preview of docpie. Print all the parsed object"""
write = stream.write
write(('[Quick preview of Docpie %s]' % self._version).center(80, '='))
write('\n')
write(' sections '.center(80, '-'))
write('\n')
write(self.usage_text)
write('\n')
option_sections = self.option_sections
if option_sections:
write('\n')
write('\n'.join(option_sections.values()))
write('\n')
write(' str '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %s\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %s\n' % each)
write('\n')
write(' repr '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %r\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %r\n' % each)
write('\n')
write(' auto handlers '.center(80, '-'))
write('\n')
for key, value in self.extra.items():
write('%s %s\n' % (key, value))
def __str__(self):
return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
|
TylerTemp/docpie
|
docpie/pie.py
|
Docpie.set_auto_handler
|
python
|
def set_auto_handler(self, flag, handler):
assert flag.startswith('-') and flag not in ('-', '--')
alias = self.find_flag_alias(flag) or []
self.extra[flag] = handler
for each in alias:
self.extra[each] = handler
|
Set pre-auto-handler for a flag.
the handler must accept two argument: first the `pie` which
referent to the current `Docpie` instance, second, the `flag`
which is the flag found in `argv`.
Different from `extra` argument, this will set the alias
option you defined in `Option` section with the same
behavior.
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/pie.py#L760-L775
|
[
"def find_flag_alias(self, flag):\n \"\"\"Return alias set of a flag; return None if flag is not defined in\n \"Options\".\n \"\"\"\n for each in self.opt_names:\n if flag in each:\n result = set(each) # a copy\n result.remove(flag)\n return result\n return None\n"
] |
class Docpie(dict):
# Docpie version
# it's not a good idea but it can avoid loop importing
_version = '0.4.2'
option_name = 'Options:'
usage_name = 'Usage:'
doc = None
case_sensitive = False
auto2dashes = True
name = None
help = True
helpstyle = 'python'
version = None
stdopt = True
attachopt = True
attachvalue = True
options_first = False
appeared_only = False
extra = {}
namedoptions = False
opt_names = []
opt_names_required_max_args = {}
def __init__(self, doc=None, help=True, version=None,
stdopt=True, attachopt=True, attachvalue=True,
helpstyle='python',
auto2dashes=True, name=None, case_sensitive=False,
optionsfirst=False, appearedonly=False, namedoptions=False,
extra=None):
super(Docpie, self).__init__()
if case_sensitive:
warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
'case insensitive')
if extra is None:
extra = {}
else:
extra = self._formal_extra(extra)
# set config first
self.set_config(
stdopt=stdopt, attachopt=attachopt, attachvalue=attachvalue,
auto2dashes=auto2dashes, name=name, case_sensitive=case_sensitive,
optionsfirst=optionsfirst, appearedonly=appearedonly,
namedoptions=namedoptions)
self.help = help
self.helpstyle = helpstyle
self.version = version
self.extra = extra
if doc is not None:
self.doc = doc
self._init()
def _init(self):
uparser = UsageParser(
self.usage_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
oparser = OptionParser(
self.option_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
uparser.parse_content(self.doc)
self.usage_text = usage_text = uparser.raw_content
# avoid usage contains "Options:" word
if usage_text is None:
assert self.usage_name.lower() not in self.doc.lower()
raise DocpieError(
'usage title %r not found in doc' % (self.usage_name,)
)
prefix, _, suffix = self.doc.partition(usage_text)
oparser.parse(prefix + suffix)
self.option_sections = oparser.raw_content
self.options = oparser.instances
uparser.parse(None, self.name, self.options)
self.usages = uparser.instances
self.opt_names_required_max_args = {}
for opt_ins in uparser.all_options:
if opt_ins.ref:
# max_arg = max(opt_ins.arg_range())
max_arg = max(opt_ins.ref.arg_range())
else:
max_arg = 0
for each_name in opt_ins.names:
self.opt_names_required_max_args[each_name] = max_arg
self.opt_names = []
for options in self.options.values():
for each_option in options:
self.opt_names.append(each_option[0].names)
self.set_config(help=self.help,
version=self.version,
extra=dict(self.extra))
def docpie(self, argv=None):
"""match the argv for each usages, return dict.
if argv is None, it will use sys.argv instead.
if argv is str, it will call argv.split() first.
this function will check the options in self.extra and handle it first.
Which means it may not try to match any usages because of the checking.
"""
token = self._prepare_token(argv)
# check first, raise after
# so `-hwhatever` can trigger `-h` first
self.check_flag_and_handler(token)
if token.error is not None:
# raise DocpieExit('%s\n\n%s' % (token.error, help_msg))
self.exception_handler(token.error)
try:
result, dashed = self._match(token)
except DocpieExit as e:
self.exception_handler(e)
# if error is not None:
# self.exception_handler(error)
value = result.get_value(self.appeared_only, False)
self.clear()
self.update(value)
if self.appeared_only:
self._drop_non_appeared()
logger.debug('get all matched value %s', self)
rest = list(self.usages) # a copy
rest.remove(result)
self._add_rest_value(rest)
logger.debug('merged rest values, now %s', self)
self._add_option_value()
self._dashes_value(dashed)
return dict(self) # remove all other reference in this instance
def _drop_non_appeared(self):
for key, _ in filter(lambda k_v: k_v[1] == -1, dict(self).items()):
self.pop(key)
def _add_rest_value(self, rest):
for each in rest:
default_values = each.get_sys_default_value(
self.appeared_only, False)
logger.debug('get rest values %s -> %s', each, default_values)
common_keys = set(self).intersection(default_values)
for key in common_keys:
default = default_values[key]
valued = self[key]
logger.debug('%s: default(%s), matched(%s)',
key, default, valued)
if ((default is not True and default is not False) and
isinstance(default, int)):
valued = int(valued)
elif isinstance(default, list):
if valued is None:
valued = []
elif isinstance(valued, list):
pass
else:
valued = [valued]
logger.debug('set %s as %s', key, valued)
default_values[key] = valued
self.update(default_values)
def _add_option_value(self):
# add left option, add default value
for options in self.options.values():
for each in options:
option = each[0]
names = option.names
default = option.default
this_value = option.value
logger.debug('%s/%s/%s', option, default, this_value)
name_in_value = names.intersection(self)
if name_in_value: # add default if necessary
one_name = name_in_value.pop()
logger.debug('in names, pop %s, self %s', one_name, self)
value_in_usage = self[one_name]
if not value_in_usage: # need default
if default is None: # no default, use old matched one
final_value = value_in_usage
elif (each.repeat or
(value_in_usage is not True and
value_in_usage is not False and
isinstance(value_in_usage, (int, list)))):
final_value = default.split()
else:
final_value = default
else:
final_value = value_in_usage
if option.ref is None and each.repeat:
final_value = int(final_value or 0)
# just add this key-value.
# Note all option here never been matched
elif self.appeared_only:
continue
else:
ref = option.ref
if default is not None:
if (each.repeat or
(this_value not in (True, False) and
isinstance(this_value, (int, list)))):
final_value = default.split()
else:
if ref is not None and max(ref.arg_range()) > 1:
final_value = default.split()
else:
final_value = default
else:
if ref is not None:
arg_range = ref.arg_range()
# if min(arg_range) != 0:
# # It requires at least a value
# logger.debug('%s expects value', option)
# raise DocpieExit(DocpieException.usage_str)
if max(arg_range) == 1:
final_value = None
else:
assert max(arg_range) > 1
final_value = []
# ref is None
elif this_value is None:
final_value = 0 if each.repeat else False
else:
final_value = \
int(this_value) if each.repeat else this_value
logger.debug('set %s value %s', names, final_value)
final = {}
for name in names:
final[name] = final_value
self.update(final)
def _dashes_value(self, dashes):
result = self['--'] if '--' in self else dashes
if self.options_first:
if result is True:
result = False
elif result is False:
pass
elif isinstance(result, int):
result = max(0, result - 1)
if self.auto2dashes:
result = bool(result)
self['--'] = result
def _prepare_token(self, argv):
if argv is None:
argv = sys.argv
elif isinstance(argv, StrType):
argv = argv.split()
# the things in extra may not be announced
all_opt_requried_max_args = dict.fromkeys(self.extra, 0)
all_opt_requried_max_args.update(self.opt_names_required_max_args)
token = Argv(argv[1:], self.auto2dashes or self.options_first,
self.stdopt, self.attachopt, self.attachvalue,
all_opt_requried_max_args)
none_or_error = token.formal(self.options_first)
logger.debug('formal token: %s; error: %s', token, none_or_error)
if none_or_error is not None:
return self.exception_handler(none_or_error)
return token
def _match(self, token):
for each in self.usages:
logger.debug('matching usage %s', each)
argv_clone = token.clone()
if each.match(argv_clone, False):
logger.debug('matched usage %s, checking rest argv %s',
each, argv_clone)
if (not argv_clone or
(argv_clone.auto_dashes and
list(argv_clone) == ['--'])):
argv_clone.check_dash()
logger.debug('matched usage %s / %s', each, argv_clone)
return each, argv_clone.dashes
logger.debug('matching %s left %s, checking failed',
each, argv_clone)
each.reset()
logger.debug('failed matching usage %s / %s', each, argv_clone)
else:
logger.debug('none matched')
raise DocpieExit(None)
def check_flag_and_handler(self, token):
need_arg = [name for name, expect in
self.opt_names_required_max_args.items() if expect != 0]
options = set()
for ele in token:
if self.auto2dashes and ele == '--':
break
if ele.startswith('-') and ele != '-':
options.add(ele)
for inputted in options:
found = False
for auto, handler in self.extra.items():
if not callable(handler):
continue
if auto.startswith('--') and inputted.startswith('--'):
logger.debug('check %s for %s', inputted, auto)
if '=' in inputted:
inputted = inputted.split('=', 1)[0]
if inputted == auto:
found = True
break
elif auto[1] != '-' and inputted[1] != '-':
logger.debug('check %s for %s', inputted, auto)
if self.stdopt:
attachopt = self.attachopt
break_upper = False
for index, attached_name in enumerate(inputted[1:]):
if not attachopt and index > 0:
break
logger.debug(
'check %s for %s', attached_name, auto
)
stacked_name = '-' + attached_name
if stacked_name == auto:
found = True
logger.debug('find %s in %s', auto, inputted)
if stacked_name in need_arg:
break_upper = True
break
if found or break_upper: # break upper loop
break
else:
found = (inputted == auto)
if found:
logger.debug('find %s, auto handle it', auto)
handler(self, auto)
def exception_handler(self, error):
logger.debug('handling %r', error)
if self.option_sections:
help_msg = ('%s\n\n%s' %
(self.usage_text.rstrip(),
'\n'.join(self.option_sections.values())))
else:
help_msg = self.usage_text
helpstyle = self.helpstyle
if helpstyle == 'python':
if self.option_sections: # option section will help dedent
formated_help_msg = self.help_style_python(help_msg)
else: # only need to dedent it
formated_help_msg = self.help_style_dedent(help_msg)
elif helpstyle == 'dedent':
formated_help_msg = self.help_style_dedent(help_msg)
else:
formated_help_msg = help_msg
args = list(error.args)
message = args[0]
if message is not None:
formated_help_msg = '%s\n\n%s' % (message, formated_help_msg)
# remove `\n` because `raise` will auto add
args[0] = formated_help_msg.rstrip()
error = self.clone_exception(error, args)
error.usage_text = self.usage_text
error.option_sections = self.option_sections
error.msg = message
logger.debug('re-raise %r', error)
raise error
@staticmethod
def clone_exception(error, args):
"""
return a new cloned error
when do:
```
try:
do_sth()
except BaseException as e:
handle(e)
def handle(error):
# do sth with error
raise e # <- won't work!
This can generate a new cloned error of the same class
Parameters
----------
error: the caught error
args: the new args to init the cloned error
Returns
-------
new error of the same class
"""
new_error = error.__class__(*args)
new_error.__dict__ = error.__dict__
return new_error
@staticmethod
def help_handler(docpie, flag):
"""Default help(`--help`, `-h`) handler. print help string and exit.
when help = 'short_brief', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only.
when help = 'short_brief_notice', flag startswith `--` will print
the full `doc`, `-` for "Usage" section and "Option" section only,
with a message.
"Use `--help` to see the full help messsage" in the end
otherwith(default), print the full `doc`
"""
help_type = docpie.help
helpstyle = docpie.helpstyle
if helpstyle == 'python':
doc = Docpie.help_style_python(docpie.doc)
elif helpstyle == 'dedent':
doc = Docpie.help_style_dedent(docpie.doc)
# elif help_style == 'raw':
# doc = Docpie.help_style_raw(docpie.doc)
else:
doc = docpie.doc
if help_type == 'short_brief':
if flag.startswith('--'):
print(doc)
else:
print(docpie.usage_text.rstrip())
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()))
elif help_type == 'short_brief_notice':
if flag.startswith('--'):
sys.stdout.write(doc)
else:
print(docpie.usage_text)
option_sections = docpie.option_sections
if option_sections:
print('')
print('\n'.join(option_sections.values()).rstrip())
print('')
print('Use `--help` to see the full help messsage.')
else:
sys.stdout.write(doc)
sys.exit()
@staticmethod
def help_style_python(docstring):
if not docstring:
return '\n'
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# indent = sys.maxint
indent = None
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
if indent is None:
indent = len(line) - len(stripped)
else:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
# if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed) + '\n'
@staticmethod
def help_style_dedent(docstring):
return textwrap.dedent(docstring)
@staticmethod
def version_handler(docpie, flag):
"""Default `-v` and `--version` handler. print the verison and exit."""
print(docpie.version)
sys.exit()
# Because it's divided from dict
# json.dump(docpie, default=docpie.convert_2_dict) won't work
# so convert to dict before JSONlizing
def to_dict(self): # cls, self):
"""Convert Docpie into a JSONlizable dict.
Use it in this way:
pie = Docpie(__doc__)
json.dumps(pie.convert_2_dict())
Note the `extra` info will be lost if you costomize that,
because a function is not JSONlizable.
You can use `set_config(extra={...})` to set it back.
"""
config = {
'stdopt': self.stdopt,
'attachopt': self.attachopt,
'attachvalue': self.attachvalue,
'auto2dashes': self.auto2dashes,
'case_sensitive': self.case_sensitive,
'namedoptions': self.namedoptions,
'appearedonly': self.appeared_only,
'optionsfirst': self.options_first,
'option_name': self.option_name,
'usage_name': self.usage_name,
'name': self.name,
'help': self.help,
'version': self.version
}
text = {
'doc': self.doc,
'usage_text': self.usage_text,
'option_sections': self.option_sections,
}
# option = [convert_2_dict(x) for x in self.options]
option = {}
for title, options in self.options.items():
option[title] = [convert_2_dict(x) for x in options]
usage = [convert_2_dict(x) for x in self.usages]
return {
'__version__': self._version,
'__class__': 'Docpie',
'__config__': config,
'__text__': text,
'option': option,
'usage': usage,
'option_names': [list(x) for x in self.opt_names],
'opt_names_required_max_args': self.opt_names_required_max_args
}
convert_2_dict = convert_to_dict = to_dict
@classmethod
def from_dict(cls, dic):
"""Convert dict generated by `convert_2_dict` into Docpie instance
You can do this:
pie = Docpie(__doc__)
clone_pie = json.loads(pie.convert_2_docpie(
json.dumps(pie.convert_2_dict())
))
Note if you changed `extra`, it will be lost.
You can use `set_config(extra={...})` to set it back.
"""
if '__version__' not in dic:
raise ValueError('Not support old docpie data')
data_version = int(dic['__version__'].replace('.', ''))
this_version = int(cls._version.replace('.', ''))
logger.debug('this: %s, old: %s', this_version, data_version)
if data_version < this_version:
raise ValueError('Not support old docpie data')
assert dic['__class__'] == 'Docpie'
config = dic['__config__']
help = config.pop('help')
version = config.pop('version')
option_name = config.pop('option_name')
usage_name = config.pop('usage_name')
self = cls(None, **config)
self.option_name = option_name
self.usage_name = usage_name
text = dic['__text__']
self.doc = text['doc']
self.usage_text = text['usage_text']
self.option_sections = text['option_sections']
self.opt_names = [set(x) for x in dic['option_names']]
self.opt_names_required_max_args = dic['opt_names_required_max_args']
self.set_config(help=help, version=version)
self.options = o = {}
for title, options in dic['option'].items():
opt_ins = [convert_2_object(x, {}, self.namedoptions)
for x in options]
o[title] = opt_ins
self.usages = [convert_2_object(x, self.options, self.namedoptions)
for x in dic['usage']]
return self
convert_2_docpie = convert_to_docpie = from_dict
def set_config(self, **config):
"""Shadow all the current config."""
reinit = False
if 'stdopt' in config:
stdopt = config.pop('stdopt')
reinit = (stdopt != self.stdopt)
self.stdopt = stdopt
if 'attachopt' in config:
attachopt = config.pop('attachopt')
reinit = reinit or (attachopt != self.attachopt)
self.attachopt = attachopt
if 'attachvalue' in config:
attachvalue = config.pop('attachvalue')
reinit = reinit or (attachvalue != self.attachvalue)
self.attachvalue = attachvalue
if 'auto2dashes' in config:
self.auto2dashes = config.pop('auto2dashes')
if 'name' in config:
name = config.pop('name')
reinit = reinit or (name != self.name)
self.name = name
if 'help' in config:
self.help = config.pop('help')
self._set_or_remove_extra_handler(
self.help, ('--help', '-h'), self.help_handler)
if 'version' in config:
self.version = config.pop('version')
self._set_or_remove_extra_handler(
self.version is not None,
('--version', '-v'),
self.version_handler)
if 'case_sensitive' in config:
case_sensitive = config.pop('case_sensitive')
reinit = reinit or (case_sensitive != self.case_sensitive)
self.case_sensitive = case_sensitive
if 'optionsfirst' in config:
self.options_first = config.pop('optionsfirst')
if 'appearedonly' in config:
self.appeared_only = config.pop('appearedonly')
if 'namedoptions' in config:
namedoptions = config.pop('namedoptions')
reinit = reinit or (namedoptions != self.namedoptions)
self.namedoptions = namedoptions
if 'extra' in config:
self.extra.update(self._formal_extra(config.pop('extra')))
if config: # should be empty
raise ValueError(
'`%s` %s not accepted key argument%s' % (
'`, `'.join(config),
'is' if len(config) == 1 else 'are',
'' if len(config) == 1 else 's'
))
if self.doc is not None and reinit:
logger.warning(
'You changed the config that requires re-initialized'
' `Docpie` object. Create a new one instead'
)
self._init()
def _formal_extra(self, extra):
result = {}
for keys, value in extra.items():
if isinstance(keys, StrType):
keys = [keys]
result.update((k, value) for k in keys)
return result
def _set_or_remove_extra_handler(self, set_handler, find_order, handler):
for flag in find_order:
alias = self.find_flag_alias(flag)
if alias is not None:
alias.add(flag)
for each in alias:
if set_handler:
logger.debug('set %s hanlder %s', each, handler)
self.extra[each] = handler
else:
logger.debug('remove %s hanlder', each)
_hdlr = self.extra.pop(each, None)
logger.debug('%s handler %s removed', each, _hdlr)
break
else:
for flag in find_order:
if set_handler:
logger.debug('set %s hanlder', flag)
self.extra[flag] = handler
else:
logger.debug('remove %s hanlder', flag)
self.extra.pop(flag, None)
def find_flag_alias(self, flag):
"""Return alias set of a flag; return None if flag is not defined in
"Options".
"""
for each in self.opt_names:
if flag in each:
result = set(each) # a copy
result.remove(flag)
return result
return None
def preview(self, stream=sys.stdout):
"""A quick preview of docpie. Print all the parsed object"""
write = stream.write
write(('[Quick preview of Docpie %s]' % self._version).center(80, '='))
write('\n')
write(' sections '.center(80, '-'))
write('\n')
write(self.usage_text)
write('\n')
option_sections = self.option_sections
if option_sections:
write('\n')
write('\n'.join(option_sections.values()))
write('\n')
write(' str '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %s\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %s\n' % each)
write('\n')
write(' repr '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %r\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %r\n' % each)
write('\n')
write(' auto handlers '.center(80, '-'))
write('\n')
for key, value in self.extra.items():
write('%s %s\n' % (key, value))
def __str__(self):
return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
|
TylerTemp/docpie
|
docpie/pie.py
|
Docpie.preview
|
python
|
def preview(self, stream=sys.stdout):
write = stream.write
write(('[Quick preview of Docpie %s]' % self._version).center(80, '='))
write('\n')
write(' sections '.center(80, '-'))
write('\n')
write(self.usage_text)
write('\n')
option_sections = self.option_sections
if option_sections:
write('\n')
write('\n'.join(option_sections.values()))
write('\n')
write(' str '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %s\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %s\n' % each)
write('\n')
write(' repr '.center(80, '-'))
write('\n[%s]\n' % self.usage_name)
for each in self.usages:
write(' %r\n' % each)
write('\n[Options:]\n\n')
for title, sections in self.options.items():
if title:
full_title = '%s %s' % (title, self.option_name)
else:
full_title = self.option_name
write(full_title)
write('\n')
for each in sections:
write(' %r\n' % each)
write('\n')
write(' auto handlers '.center(80, '-'))
write('\n')
for key, value in self.extra.items():
write('%s %s\n' % (key, value))
|
A quick preview of docpie. Print all the parsed object
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/pie.py#L777-L837
| null |
class Docpie(dict):
# Docpie version
# it's not a good idea but it can avoid loop importing
_version = '0.4.2'
option_name = 'Options:'
usage_name = 'Usage:'
doc = None
case_sensitive = False
auto2dashes = True
name = None
help = True
helpstyle = 'python'
version = None
stdopt = True
attachopt = True
attachvalue = True
options_first = False
appeared_only = False
extra = {}
namedoptions = False
opt_names = []
opt_names_required_max_args = {}
def __init__(self, doc=None, help=True, version=None,
stdopt=True, attachopt=True, attachvalue=True,
helpstyle='python',
auto2dashes=True, name=None, case_sensitive=False,
optionsfirst=False, appearedonly=False, namedoptions=False,
extra=None):
super(Docpie, self).__init__()
if case_sensitive:
warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
'case insensitive')
if extra is None:
extra = {}
else:
extra = self._formal_extra(extra)
# set config first
self.set_config(
stdopt=stdopt, attachopt=attachopt, attachvalue=attachvalue,
auto2dashes=auto2dashes, name=name, case_sensitive=case_sensitive,
optionsfirst=optionsfirst, appearedonly=appearedonly,
namedoptions=namedoptions)
self.help = help
self.helpstyle = helpstyle
self.version = version
self.extra = extra
if doc is not None:
self.doc = doc
self._init()
def _init(self):
uparser = UsageParser(
self.usage_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
oparser = OptionParser(
self.option_name, self.case_sensitive,
self.stdopt, self.attachopt, self.attachvalue, self.namedoptions)
uparser.parse_content(self.doc)
self.usage_text = usage_text = uparser.raw_content
# avoid usage contains "Options:" word
if usage_text is None:
assert self.usage_name.lower() not in self.doc.lower()
raise DocpieError(
'usage title %r not found in doc' % (self.usage_name,)
)
prefix, _, suffix = self.doc.partition(usage_text)
oparser.parse(prefix + suffix)
self.option_sections = oparser.raw_content
self.options = oparser.instances
uparser.parse(None, self.name, self.options)
self.usages = uparser.instances
self.opt_names_required_max_args = {}
for opt_ins in uparser.all_options:
if opt_ins.ref:
# max_arg = max(opt_ins.arg_range())
max_arg = max(opt_ins.ref.arg_range())
else:
max_arg = 0
for each_name in opt_ins.names:
self.opt_names_required_max_args[each_name] = max_arg
self.opt_names = []
for options in self.options.values():
for each_option in options:
self.opt_names.append(each_option[0].names)
self.set_config(help=self.help,
version=self.version,
extra=dict(self.extra))
def docpie(self, argv=None):
"""match the argv for each usages, return dict.
if argv is None, it will use sys.argv instead.
if argv is str, it will call argv.split() first.
this function will check the options in self.extra and handle it first.
Which means it may not try to match any usages because of the checking.
"""
token = self._prepare_token(argv)
# check first, raise after
# so `-hwhatever` can trigger `-h` first
self.check_flag_and_handler(token)
if token.error is not None:
# raise DocpieExit('%s\n\n%s' % (token.error, help_msg))
self.exception_handler(token.error)
try:
result, dashed = self._match(token)
except DocpieExit as e:
self.exception_handler(e)
# if error is not None:
# self.exception_handler(error)
value = result.get_value(self.appeared_only, False)
self.clear()
self.update(value)
if self.appeared_only:
self._drop_non_appeared()
logger.debug('get all matched value %s', self)
rest = list(self.usages) # a copy
rest.remove(result)
self._add_rest_value(rest)
logger.debug('merged rest values, now %s', self)
self._add_option_value()
self._dashes_value(dashed)
return dict(self) # remove all other reference in this instance
def _drop_non_appeared(self):
    """Remove every entry whose value is -1 (the "did not appear" marker)."""
    absent_keys = [key for key, value in dict(self).items() if value == -1]
    for key in absent_keys:
        self.pop(key)
def _add_rest_value(self, rest):
    """Merge the system default values of every non-matched usage in
    *rest* into the already-matched result.

    For keys present in both, the matched value is coerced so its type
    agrees with the default: a non-bool int default marks a counting
    value (coerce to int); a list default marks a repeatable value
    (wrap non-list values, map None to an empty list).
    """
    for each in rest:
        default_values = each.get_sys_default_value(
            self.appeared_only, False)
        logger.debug('get rest values %s -> %s', each, default_values)
        common_keys = set(self).intersection(default_values)
        for key in common_keys:
            default = default_values[key]
            valued = self[key]
            logger.debug('%s: default(%s), matched(%s)',
                         key, default, valued)
            # bool is an int subclass, so exclude True/False explicitly
            if ((default is not True and default is not False) and
                    isinstance(default, int)):
                valued = int(valued)
            elif isinstance(default, list):
                if valued is None:
                    valued = []
                elif isinstance(valued, list):
                    pass
                else:
                    valued = [valued]
            logger.debug('set %s as %s', key, valued)
            default_values[key] = valued
        self.update(default_values)
def _add_option_value(self):
    """Fill in values for every option announced in the "Options"
    sections, whether or not it was matched in argv.

    Matched options that carry a falsy value get their announced
    default; unmatched options get a default derived from their
    announced default, argument count and repeat flag.
    """
    # add left option, add default value
    for options in self.options.values():
        for each in options:
            option = each[0]
            names = option.names
            default = option.default
            this_value = option.value
            logger.debug('%s/%s/%s', option, default, this_value)
            name_in_value = names.intersection(self)
            if name_in_value:  # add default if necessary
                one_name = name_in_value.pop()
                logger.debug('in names, pop %s, self %s', one_name, self)
                value_in_usage = self[one_name]
                if not value_in_usage:  # need default
                    if default is None:  # no default, use old matched one
                        final_value = value_in_usage
                    elif (each.repeat or
                            (value_in_usage is not True and
                             value_in_usage is not False and
                             isinstance(value_in_usage, (int, list)))):
                        # repeatable / multi-value option: split the
                        # default string into a list of values
                        final_value = default.split()
                    else:
                        final_value = default
                else:
                    final_value = value_in_usage
                if option.ref is None and each.repeat:
                    # repeatable flag without argument counts occurrences
                    final_value = int(final_value or 0)
            # just add this key-value.
            # Note all option here never been matched
            elif self.appeared_only:
                continue
            else:
                ref = option.ref
                if default is not None:
                    if (each.repeat or
                            (this_value not in (True, False) and
                             isinstance(this_value, (int, list)))):
                        final_value = default.split()
                    else:
                        if ref is not None and max(ref.arg_range()) > 1:
                            final_value = default.split()
                        else:
                            final_value = default
                else:
                    if ref is not None:
                        arg_range = ref.arg_range()
                        # if min(arg_range) != 0:
                        #     # It requires at least a value
                        #     logger.debug('%s expects value', option)
                        #     raise DocpieExit(DocpieException.usage_str)
                        if max(arg_range) == 1:
                            final_value = None
                        else:
                            assert max(arg_range) > 1
                            final_value = []
                    # ref is None
                    elif this_value is None:
                        final_value = 0 if each.repeat else False
                    else:
                        final_value = \
                            int(this_value) if each.repeat else this_value
            logger.debug('set %s value %s', names, final_value)
            # every alias of the option gets the same value
            final = {}
            for name in names:
                final[name] = final_value
            self.update(final)
def _dashes_value(self, dashes):
    """Store the final value for the `--` separator key."""
    if '--' in self:
        result = self['--']
    else:
        result = dashes
    if self.options_first:
        # the first `--` only terminates option parsing, discount it
        if result is True:
            result = False
        elif isinstance(result, int) and result is not False:
            result = max(0, result - 1)
    if self.auto2dashes:
        result = bool(result)
    self['--'] = result
def _prepare_token(self, argv):
    """Normalize *argv* into a formalized ``Argv`` token stream.

    *argv* defaults to ``sys.argv``; a str is split on whitespace.
    The program name (``argv[0]``) is dropped.  Returns the token
    stream, or delegates to ``exception_handler`` when formalizing
    reports an error.
    """
    if argv is None:
        argv = sys.argv
    elif isinstance(argv, StrType):
        argv = argv.split()
    # the things in extra may not be announced
    all_opt_requried_max_args = dict.fromkeys(self.extra, 0)
    all_opt_requried_max_args.update(self.opt_names_required_max_args)
    token = Argv(argv[1:], self.auto2dashes or self.options_first,
                 self.stdopt, self.attachopt, self.attachvalue,
                 all_opt_requried_max_args)
    none_or_error = token.formal(self.options_first)
    logger.debug('formal token: %s; error: %s', token, none_or_error)
    if none_or_error is not None:
        return self.exception_handler(none_or_error)
    return token
def _match(self, token):
    """Try each usage pattern against a clone of *token*.

    Returns ``(matched_usage, dashes)`` for the first usage that
    consumes the whole argv (optionally leaving a lone ``--``).
    Raises ``DocpieExit(None)`` when no usage matches.
    """
    for each in self.usages:
        logger.debug('matching usage %s', each)
        # clone so a failed attempt does not consume the real token
        argv_clone = token.clone()
        if each.match(argv_clone, False):
            logger.debug('matched usage %s, checking rest argv %s',
                         each, argv_clone)
            if (not argv_clone or
                    (argv_clone.auto_dashes and
                     list(argv_clone) == ['--'])):
                argv_clone.check_dash()
                logger.debug('matched usage %s / %s', each, argv_clone)
                return each, argv_clone.dashes
            logger.debug('matching %s left %s, checking failed',
                         each, argv_clone)
        # clear any partial state the attempt left on the pattern
        each.reset()
        logger.debug('failed matching usage %s / %s', each, argv_clone)
    else:
        logger.debug('none matched')
        raise DocpieExit(None)
def check_flag_and_handler(self, token):
    """Scan *token* for flags that have a registered auto-handler in
    ``self.extra`` (e.g. ``-h`` / ``--version``) and invoke the handler
    for each one found.

    Scanning stops at ``--`` when ``auto2dashes`` is on.  For stacked
    short options (``-abc``) each attached letter is checked; scanning
    a stack stops at the first letter that itself expects an argument,
    since the rest of the stack is that argument.
    """
    # flags that consume an argument: their presence ends a short stack
    need_arg = [name for name, expect in
                self.opt_names_required_max_args.items() if expect != 0]
    options = set()
    for ele in token:
        if self.auto2dashes and ele == '--':
            break
        if ele.startswith('-') and ele != '-':
            options.add(ele)
    for inputted in options:
        found = False
        for auto, handler in self.extra.items():
            if not callable(handler):
                continue
            if auto.startswith('--') and inputted.startswith('--'):
                logger.debug('check %s for %s', inputted, auto)
                # `--flag=value` counts as `--flag`
                if '=' in inputted:
                    inputted = inputted.split('=', 1)[0]
                if inputted == auto:
                    found = True
                    break
            elif auto[1] != '-' and inputted[1] != '-':
                logger.debug('check %s for %s', inputted, auto)
                if self.stdopt:
                    attachopt = self.attachopt
                    break_upper = False
                    for index, attached_name in enumerate(inputted[1:]):
                        if not attachopt and index > 0:
                            break
                        logger.debug(
                            'check %s for %s', attached_name, auto
                        )
                        stacked_name = '-' + attached_name
                        if stacked_name == auto:
                            found = True
                            logger.debug('find %s in %s', auto, inputted)
                        if stacked_name in need_arg:
                            break_upper = True
                            break
                    if found or break_upper:  # break upper loop
                        break
                else:
                    # NOTE(review): this branch does not `break`, so a
                    # later non-matching handler can reset `found` —
                    # confirm this is intended for non-stdopt mode
                    found = (inputted == auto)
        if found:
            logger.debug('find %s, auto handle it', auto)
            handler(self, auto)
def exception_handler(self, error):
    """Attach the formatted usage/options help text to *error* and
    re-raise it.

    The help text is built from ``usage_text`` plus any option
    sections, formatted according to ``self.helpstyle``, and prefixed
    with the error's own message when it has one.  The re-raised
    exception also carries ``usage_text``, ``option_sections`` and
    ``msg`` attributes for programmatic access.
    """
    logger.debug('handling %r', error)
    if self.option_sections:
        help_msg = ('%s\n\n%s' %
                    (self.usage_text.rstrip(),
                     '\n'.join(self.option_sections.values())))
    else:
        help_msg = self.usage_text
    helpstyle = self.helpstyle
    if helpstyle == 'python':
        if self.option_sections:  # option section will help dedent
            formated_help_msg = self.help_style_python(help_msg)
        else:  # only need to dedent it
            formated_help_msg = self.help_style_dedent(help_msg)
    elif helpstyle == 'dedent':
        formated_help_msg = self.help_style_dedent(help_msg)
    else:
        formated_help_msg = help_msg
    args = list(error.args)
    message = args[0]
    if message is not None:
        formated_help_msg = '%s\n\n%s' % (message, formated_help_msg)
    # remove `\n` because `raise` will auto add
    args[0] = formated_help_msg.rstrip()
    # a caught exception cannot simply be re-raised from here with new
    # args, so rebuild it (see clone_exception)
    error = self.clone_exception(error, args)
    error.usage_text = self.usage_text
    error.option_sections = self.option_sections
    error.msg = message
    logger.debug('re-raise %r', error)
    raise error
@staticmethod
def clone_exception(error, args):
"""
return a new cloned error
when do:
```
try:
do_sth()
except BaseException as e:
handle(e)
def handle(error):
# do sth with error
raise e # <- won't work!
This can generate a new cloned error of the same class
Parameters
----------
error: the caught error
args: the new args to init the cloned error
Returns
-------
new error of the same class
"""
new_error = error.__class__(*args)
new_error.__dict__ = error.__dict__
return new_error
@staticmethod
def help_handler(docpie, flag):
    """Default help (`--help`, `-h`) handler: print help text and exit.

    when help = 'short_brief', flag startswith `--` will print
    the full `doc`, `-` for "Usage" section and "Option" section only.

    when help = 'short_brief_notice', flag startswith `--` will print
    the full `doc`, `-` for "Usage" section and "Option" section only,
    with the message
    "Use `--help` to see the full help message" in the end.

    otherwise (default), print the full `doc`.
    """
    help_type = docpie.help
    helpstyle = docpie.helpstyle
    # re-format the stored doc according to the configured help style
    if helpstyle == 'python':
        doc = Docpie.help_style_python(docpie.doc)
    elif helpstyle == 'dedent':
        doc = Docpie.help_style_dedent(docpie.doc)
    # elif help_style == 'raw':
    #     doc = Docpie.help_style_raw(docpie.doc)
    else:
        doc = docpie.doc
    if help_type == 'short_brief':
        if flag.startswith('--'):
            print(doc)
        else:
            print(docpie.usage_text.rstrip())
            option_sections = docpie.option_sections
            if option_sections:
                print('')
                print('\n'.join(option_sections.values()))
    elif help_type == 'short_brief_notice':
        if flag.startswith('--'):
            sys.stdout.write(doc)
        else:
            print(docpie.usage_text)
            option_sections = docpie.option_sections
            if option_sections:
                print('')
                print('\n'.join(option_sections.values()).rstrip())
            print('')
            # fixed typo in the user-facing notice: "messsage" -> "message"
            print('Use `--help` to see the full help message.')
    else:
        sys.stdout.write(doc)
    sys.exit()
@staticmethod
def help_style_python(docstring):
if not docstring:
return '\n'
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# indent = sys.maxint
indent = None
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
if indent is None:
indent = len(line) - len(stripped)
else:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
# if indent < sys.maxint:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed) + '\n'
@staticmethod
def help_style_dedent(docstring):
    """Remove the common leading whitespace via ``textwrap.dedent``."""
    return textwrap.dedent(docstring)
@staticmethod
def version_handler(docpie, flag):
    """Default `-v` and `--version` handler: print the version and exit."""
    print(docpie.version)
    sys.exit()
# Because it's divided from dict
# json.dump(docpie, default=docpie.convert_2_dict) won't work
# so convert to dict before JSONlizing
def to_dict(self):  # cls, self):
    """Convert Docpie into a JSONlizable dict.

    Use it in this way:
    pie = Docpie(__doc__)
    json.dumps(pie.convert_2_dict())

    Note the `extra` info will be lost if you costomize that,
    because a function is not JSONlizable.
    You can use `set_config(extra={...})` to set it back.
    """
    config = {
        'stdopt': self.stdopt,
        'attachopt': self.attachopt,
        'attachvalue': self.attachvalue,
        'auto2dashes': self.auto2dashes,
        'case_sensitive': self.case_sensitive,
        'namedoptions': self.namedoptions,
        'appearedonly': self.appeared_only,
        'optionsfirst': self.options_first,
        'option_name': self.option_name,
        'usage_name': self.usage_name,
        'name': self.name,
        'help': self.help,
        'version': self.version
    }
    text = {
        'doc': self.doc,
        'usage_text': self.usage_text,
        'option_sections': self.option_sections,
    }
    # option = [convert_2_dict(x) for x in self.options]
    option = {}
    for title, options in self.options.items():
        option[title] = [convert_2_dict(x) for x in options]
    usage = [convert_2_dict(x) for x in self.usages]
    # `__version__` guards against loading data from an older docpie
    return {
        '__version__': self._version,
        '__class__': 'Docpie',
        '__config__': config,
        '__text__': text,
        'option': option,
        'usage': usage,
        'option_names': [list(x) for x in self.opt_names],
        'opt_names_required_max_args': self.opt_names_required_max_args
    }

# backwards-compatible aliases
convert_2_dict = convert_to_dict = to_dict
@classmethod
def from_dict(cls, dic):
    """Convert dict generated by `convert_2_dict` into Docpie instance

    You can do this:
    pie = Docpie(__doc__)
    clone_pie = json.loads(pie.convert_2_docpie(
        json.dumps(pie.convert_2_dict())
    ))

    Note if you changed `extra`, it will be lost.
    You can use `set_config(extra={...})` to set it back.

    Raises ValueError when *dic* was produced by an older,
    incompatible docpie version.
    """
    if '__version__' not in dic:
        raise ValueError('Not support old docpie data')
    data_version = int(dic['__version__'].replace('.', ''))
    this_version = int(cls._version.replace('.', ''))
    logger.debug('this: %s, old: %s', this_version, data_version)
    if data_version < this_version:
        raise ValueError('Not support old docpie data')
    assert dic['__class__'] == 'Docpie'
    config = dic['__config__']
    # pop the keys that are not accepted by __init__ and restore them
    # on the instance afterwards
    help = config.pop('help')
    version = config.pop('version')
    option_name = config.pop('option_name')
    usage_name = config.pop('usage_name')
    self = cls(None, **config)
    self.option_name = option_name
    self.usage_name = usage_name
    text = dic['__text__']
    self.doc = text['doc']
    self.usage_text = text['usage_text']
    self.option_sections = text['option_sections']
    self.opt_names = [set(x) for x in dic['option_names']]
    self.opt_names_required_max_args = dic['opt_names_required_max_args']
    self.set_config(help=help, version=version)
    self.options = o = {}
    for title, options in dic['option'].items():
        opt_ins = [convert_2_object(x, {}, self.namedoptions)
                   for x in options]
        o[title] = opt_ins
    self.usages = [convert_2_object(x, self.options, self.namedoptions)
                   for x in dic['usage']]
    return self

# backwards-compatible aliases
convert_2_docpie = convert_to_docpie = from_dict
def set_config(self, **config):
    """Shadow all the current config.

    Accepted keys: stdopt, attachopt, attachvalue, auto2dashes, name,
    help, version, case_sensitive, optionsfirst, appearedonly,
    namedoptions, extra.  Any other key raises ValueError.

    Changing a key that affects parsing (stdopt, attachopt,
    attachvalue, name, case_sensitive, namedoptions) triggers a
    re-initialization of the whole instance when a doc is loaded.
    """
    reinit = False
    if 'stdopt' in config:
        stdopt = config.pop('stdopt')
        reinit = (stdopt != self.stdopt)
        self.stdopt = stdopt
    if 'attachopt' in config:
        attachopt = config.pop('attachopt')
        reinit = reinit or (attachopt != self.attachopt)
        self.attachopt = attachopt
    if 'attachvalue' in config:
        attachvalue = config.pop('attachvalue')
        reinit = reinit or (attachvalue != self.attachvalue)
        self.attachvalue = attachvalue
    if 'auto2dashes' in config:
        self.auto2dashes = config.pop('auto2dashes')
    if 'name' in config:
        name = config.pop('name')
        reinit = reinit or (name != self.name)
        self.name = name
    if 'help' in config:
        self.help = config.pop('help')
        # register/unregister the default --help/-h auto-handler
        self._set_or_remove_extra_handler(
            self.help, ('--help', '-h'), self.help_handler)
    if 'version' in config:
        self.version = config.pop('version')
        self._set_or_remove_extra_handler(
            self.version is not None,
            ('--version', '-v'),
            self.version_handler)
    if 'case_sensitive' in config:
        case_sensitive = config.pop('case_sensitive')
        reinit = reinit or (case_sensitive != self.case_sensitive)
        self.case_sensitive = case_sensitive
    if 'optionsfirst' in config:
        self.options_first = config.pop('optionsfirst')
    if 'appearedonly' in config:
        self.appeared_only = config.pop('appearedonly')
    if 'namedoptions' in config:
        namedoptions = config.pop('namedoptions')
        reinit = reinit or (namedoptions != self.namedoptions)
        self.namedoptions = namedoptions
    if 'extra' in config:
        self.extra.update(self._formal_extra(config.pop('extra')))
    if config:  # should be empty
        raise ValueError(
            '`%s` %s not accepted key argument%s' % (
                '`, `'.join(config),
                'is' if len(config) == 1 else 'are',
                '' if len(config) == 1 else 's'
            ))
    if self.doc is not None and reinit:
        logger.warning(
            'You changed the config that requires re-initialized'
            ' `Docpie` object. Create a new one instead'
        )
        self._init()
def _formal_extra(self, extra):
    """Expand an `extra` mapping so every key is a single flag string.

    Each key may be one flag or an iterable of flags; every flag in a
    key maps to the same handler value in the result.
    """
    flattened = {}
    for key_or_keys, handler in extra.items():
        if isinstance(key_or_keys, StrType):
            key_or_keys = [key_or_keys]
        for key in key_or_keys:
            flattened[key] = handler
    return flattened
def _set_or_remove_extra_handler(self, set_handler, find_order, handler):
    """Register (*set_handler* truthy) or unregister *handler* for the
    first flag in *find_order* that is announced in "Options",
    together with all its aliases.  When no flag in *find_order* is
    announced, fall back to (un)registering every flag in
    *find_order* as-is.
    """
    for flag in find_order:
        alias = self.find_flag_alias(flag)
        if alias is not None:
            alias.add(flag)
            for each in alias:
                if set_handler:
                    logger.debug('set %s hanlder %s', each, handler)
                    self.extra[each] = handler
                else:
                    logger.debug('remove %s hanlder', each)
                    _hdlr = self.extra.pop(each, None)
                    logger.debug('%s handler %s removed', each, _hdlr)
            break
    else:
        # no announced flag found: for-else fallback path
        for flag in find_order:
            if set_handler:
                logger.debug('set %s hanlder', flag)
                self.extra[flag] = handler
            else:
                logger.debug('remove %s hanlder', flag)
                self.extra.pop(flag, None)
def find_flag_alias(self, flag):
    """Return the set of aliases of *flag*, or None when *flag* is not
    defined in any option group of the "Options" sections."""
    for name_group in self.opt_names:
        if flag not in name_group:
            continue
        # copy the group and drop the queried flag itself
        return set(name_group) - {flag}
    return None
def set_auto_handler(self, flag, handler):
    """Register *handler* as the pre-auto-handler for *flag*.

    The handler must accept two arguments: first the `pie`, which
    refers to the current `Docpie` instance; second the `flag`
    found in `argv`.

    Unlike the `extra` init argument, this also registers every
    alias of *flag* declared in the "Options" section with the
    same behavior.
    """
    assert flag.startswith('-') and flag not in ('-', '--')
    aliases = self.find_flag_alias(flag) or []
    for each_flag in (flag, *aliases):
        self.extra[each_flag] = handler
def __str__(self):
    """Render the matched result as a dict literal, one sorted key per line."""
    return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
|
TylerTemp/docpie
|
docpie/parser.py
|
OptionParser.parse_content
|
python
|
def parse_content(self, text):
    """parse section to formal format

    raw_content: {title: section(with title)}. For `help` access.
    formal_content: {title: section} but the section has been dedented
    without title. For parse instance"""
    raw_content = self.raw_content
    raw_content.clear()
    formal_collect = {}
    # split on visibly empty lines; re.split on a zero-width-capable
    # pattern raises ValueError on Python >= 3.5, hence the fallback
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        try:
            split = self.visible_empty_line_re.split(text)
        except ValueError:  # python >= 3.5
            split = [text]
    option_split_re = self.option_split_re
    name = re.compile(re.escape(self.option_name), re.IGNORECASE)
    for text in filter(lambda x: x and x.strip(), split):
        # logger.warning('get options group:\n%r', text)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            try:
                split_options = option_split_re.split(text)
            except ValueError:  # python >= 3.5
                continue
        split_options.pop(0)
        # split() alternates [title, section, title, section, ...]
        for title, section in zip(split_options[::2], split_options[1::2]):
            prefix, end = name.split(title)
            prefix = prefix.strip()
            section = section.rstrip()
            if end.endswith('\n'):
                formal = section
            else:
                # first option is on the title line: pad so dedent
                # later strips a consistent amount
                formal = ' ' * len(title) + section
            formal_collect.setdefault(prefix, []).append(formal)
            # logger.error((title, section))
            if prefix in raw_content:
                # TODO: better handling way?
                if self.namedoptions:
                    log = logger.warning
                else:
                    log = logger.debug
                log('duplicated options section %s', prefix)
                raw_content[prefix] += '\n%s%s' % (title, section)
            else:
                raw_content[prefix] = title + section
    if formal_collect:
        for each_title, values in formal_collect.items():
            value = '\n'.join(map(textwrap.dedent, values))
            formal_collect[each_title] = value
    self.formal_content = formal_collect
|
parse section to formal format
raw_content: {title: section(with title)}. For `help` access.
formal_content: {title: section} but the section has been dedented
without title. For parse instance
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/parser.py#L413-L476
| null |
class OptionParser(Parser):
    """Parser for the "Options" sections of a docpie doc string.

    Splits the doc into option sections, extracts each announced
    option line with its default value, and builds ``Optional``
    instances wrapping ``Option`` objects.
    """

    split_re = re.compile(r'(<.*?>)|\s+')
    wrap_symbol_re = re.compile(r'([\|\[\]\(\)]|\.\.\.)')
    line_re = re.compile(r'^(?P<indent> *)'
                         r'(?P<option>[\d\w=_, <>\-\[\]\.]+?)'
                         r'(?P<separater>$| $| {2,})'
                         r'(?P<description>.*?)'
                         r' *$',
                         flags=re.IGNORECASE)
    indent_re = re.compile(r'^(?P<indent> *)')
    to_space_re = re.compile(r',\s?|=')
    visible_empty_line_re = re.compile(r'^\s*?\n*|\r?\n(:?[\ \t]*\r?\n)+',
                                       flags=re.DOTALL)
    option_split_re_str = (r'([^\r\n]*{0}[\ \t]*\r?\n?)')
    # split_re = re.compile(r'(<.*?>)|\s?')

    # default ::= chars "[default: " chars "]"
    # support xxxxxx.[default: ]
    # support xxxxxx.[default: yes]
    # not support xxxxx[default: no].
    # not support xxxxx[default: no]!
    # If you want to match a not so strict format, this may help:
    # default_re = re.compile(r'\[default: *(?P<default>.*?) *\]'
    #                         r' *'
    #                         r'[\.\?\!]? *$',
    #                         flags=re.IGNORECASE)
    default_re = re.compile(r'\[default: (?P<default>.*?)\] *$',
                            flags=re.IGNORECASE)

    def __init__(self, option_name, case_sensitive,
                 stdopt, attachopt, attachvalue, namedoptions):
        self.stdopt = stdopt
        self.attachopt = attachopt
        self.attachvalue = attachvalue
        self.case_sensitive = case_sensitive
        self.option_name = option_name
        self.option_split_re = re.compile(
            self.option_split_re_str.format(option_name),
            flags=re.DOTALL if case_sensitive else (re.DOTALL | re.IGNORECASE)
        )
        self.raw_content = {}
        self.formal_content = None
        # maps every announced flag name to its Optional instance
        self.name_2_instance = {}
        self.namedoptions = namedoptions
        # if text is None or not text.strip():  # empty
        #     self._opt_and_default_str = []
        # else:
        #     self._opt_and_default_str = list(self._parse_text(text))
        #
        # self._chain = self._parse_to_instance(self._opt_and_default_str)

    def parse(self, text):
        """Full pipeline: sections -> (option, default) pairs -> instances."""
        self.parse_content(text)
        title_names_and_default = self.parse_names_and_default()
        self.instances = self.parse_to_instance(title_names_and_default)

    def parse_content(self, text):
        """parse section to formal format

        raw_content: {title: section(with title)}. For `help` access.
        formal_content: {title: section} but the section has been dedented
        without title. For parse instance"""
        raw_content = self.raw_content
        raw_content.clear()
        formal_collect = {}
        # re.split on a zero-width-capable pattern raises ValueError on
        # Python >= 3.5, hence the fallbacks below
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            try:
                split = self.visible_empty_line_re.split(text)
            except ValueError:  # python >= 3.5
                split = [text]
        option_split_re = self.option_split_re
        name = re.compile(re.escape(self.option_name), re.IGNORECASE)
        for text in filter(lambda x: x and x.strip(), split):
            # logger.warning('get options group:\n%r', text)
            with warnings.catch_warnings():
                warnings.simplefilter("ignore")
                try:
                    split_options = option_split_re.split(text)
                except ValueError:  # python >= 3.5
                    continue
            split_options.pop(0)
            # split() alternates [title, section, title, section, ...]
            for title, section in zip(split_options[::2], split_options[1::2]):
                prefix, end = name.split(title)
                prefix = prefix.strip()
                section = section.rstrip()
                if end.endswith('\n'):
                    formal = section
                else:
                    formal = ' ' * len(title) + section
                formal_collect.setdefault(prefix, []).append(formal)
                # logger.error((title, section))
                if prefix in raw_content:
                    # TODO: better handling way?
                    if self.namedoptions:
                        log = logger.warning
                    else:
                        log = logger.debug
                    log('duplicated options section %s', prefix)
                    raw_content[prefix] += '\n%s%s' % (title, section)
                else:
                    raw_content[prefix] = title + section
        if formal_collect:
            for each_title, values in formal_collect.items():
                value = '\n'.join(map(textwrap.dedent, values))
                formal_collect[each_title] = value
        self.formal_content = formal_collect

    def parse_names_and_default(self):
        """parse for `parse_content`

        {title: [('-a, --all=STH', 'default'), ...]}"""
        result = {}
        for title, text in self.formal_content.items():
            if not text:
                result[title] = []
                continue
            logger.debug('\n' + text)
            collect = []
            to_list = text.splitlines()
            # parse first line. Should NEVER failed.
            # this will ensure in `[default: xxx]`,
            # the `xxx`(e.g: `\t`, `,`) will not be changed by _format_line
            previous_line = to_list.pop(0)
            collect.append(self.parse_line_option_indent(previous_line))
            for line in to_list:
                indent_match = self.indent_re.match(line)
                this_indent = len(indent_match.groupdict()['indent'])
                if this_indent >= collect[-1]['indent']:
                    # A multi line description
                    previous_line = line
                    continue
                # new option line
                # deal the default for previous option
                collect[-1]['default'] = self.parse_default(previous_line)
                # deal this option
                collect.append(self.parse_line_option_indent(line))
                logger.debug(collect[-1])
                previous_line = line
            else:
                # loop ended: capture the default of the last option
                collect[-1]['default'] = self.parse_default(previous_line)
            result[title] = [
                (each['option'], each['default']) for each in collect]
        return result

    spaces_re = re.compile(r'(\ \ \s*|\t\s*)')

    @classmethod
    def cut_first_spaces_outside_bracket(cls, string):
        """Split *string* at the first >=2-space gap that is not inside
        (), [] or <>, returning (left, gap, right)."""
        right = cls.spaces_re.split(string)
        left = []
        if right and right[0] == '':  # re matches the start of the string
            right.pop(0)
        if right and not right[0].strip():  # it is indent
            left.append(right.pop(0))
        # bracket nesting counters; a gap only counts when all are closed
        brackets = {'(': 0, '[': 0, '<': 0}
        close_brancket = {'(': ')', '[': ']', '<': '>'}
        cutted = ''
        while right:
            this = right.pop(0)
            for open_b in brackets:
                brackets[open_b] += this.count(open_b)
                brackets[open_b] -= this.count(close_brancket[open_b])
            if sum(brackets.values()):
                left.append(this)
            elif (not this.strip() and
                    len(this.expandtabs()) >= 2):
                cutted = this
                break
            else:
                left.append(this)
        return ''.join(left), cutted, ''.join(right)

    @classmethod
    def parse_line_option_indent(cls, line):
        """Return {'option': <option text>, 'indent': <description column>}
        for one option line."""
        opt_str, separater, description_str = \
            cls.cut_first_spaces_outside_bracket(line)

        logger.debug('%(line)s -> %(opt_str)r, '
                     '%(separater)r, '
                     '%(description_str)r' % locals())
        if description_str.strip():
            indent = len(opt_str.expandtabs()) + len(separater.expandtabs())
            logger.debug('indent: %s', indent)
        else:
            # no description on this line: continuation lines are
            # expected at (option indent + 2) or deeper
            indent = 2 + len(cls.indent_re.match(
                opt_str.expandtabs()
            ).groupdict()['indent'])
            logger.debug('indent: %s', indent)
        return {'option': opt_str.strip(), 'indent': indent}

    @classmethod
    def parse_default(cls, line):
        """Return the `[default: ...]` value in *line*, or None."""
        m = cls.default_re.search(line)
        if m is None:
            return None
        return m.groupdict()['default']

    def parse_to_instance(self, title_of_name_and_default):
        """{title: [Option(), ...]}"""
        result = {}
        for title, name_and_default in title_of_name_and_default.items():
            logger.debug((title, name_and_default))
            result[title] = opts = []
            for opt_str, default in name_and_default:
                logger.debug('%s:%r' % (opt_str, default))
                opt, repeat = self.parse_opt_str(opt_str)
                opt.default = default
                opt_ins = Optional(opt, repeat=repeat)
                for name in opt.names:
                    self.name_2_instance[name] = opt_ins
                opts.append(opt_ins)
        return result

    def split_short_by_cfg(self, option_str):
        """Split one announced option into (name, attached value) per
        the stdopt config (e.g. `-sth` -> ('-s', 'th') when stdopt)."""
        if self.stdopt:
            if (not option_str.startswith('--') and
                    len(option_str) > 1):
                return option_str[:2], option_str[2:]
        return option_str, ''

    def parse_opt_str(self, opt):
        """Parse one announced option string into (Option, repeat)."""
        repeat = False

        # -sth=<goes> ON -> -sth, <goes>, ON
        opt_lis = self.opt_str_to_list(opt)
        logger.debug('%r -> %s' % (opt, opt_lis))

        first = opt_lis.pop(0)
        if not first.startswith('-'):
            raise DocpieError('option %s does not start with "-"' % first)

        # if self.stdopt:
        #   -sth -> name=-s, value=th
        # else:
        #   -sth -> name=-sth, value=''
        name, value = self.split_short_by_cfg(first)
        opt_ins = Option(name)
        if value == '...':
            repeat = True
            # -f... <sth>
            # NOTE(review): this branch leaves `args_ins` unset; a later
            # alias with an attached value would raise NameError — confirm
            if opt_lis and not opt_lis[0].startswith('-'):
                raise DocpieError(
                    'option "%s" has argument following "..."', opt)
        elif value:
            args_ins = [Required(Argument(value))]
        else:
            args_ins = []

        if opt_lis and opt_lis[0] == '...':
            repeat = True
            opt_lis.pop(0)
            if opt_lis and not opt_lis[0].startswith('-'):
                raise DocpieError(
                    'option "%s" has argument following "..."', opt)

        args = []  # store the current args after option
        for each in opt_lis:
            if each.startswith('-'):  # alias
                name, value = self.split_short_by_cfg(each)
                opt_ins.names.add(name)
                if value:
                    args_ins.append(Required(Argument(value)))
                if args:  # trun it into instance
                    if args[0] == '...':
                        if len(args) != 1:
                            raise DocpieError(
                                'Error in %s: "..." followed by non option',
                                opt)
                        repeat = True
                    else:
                        this_arg = Required(
                            *self.parse_pattern(Token(args))
                        ).fix()
                        if this_arg is not None:
                            args_ins.append(this_arg)
                    del args[:]
            else:
                args.append(each)
        else:
            # flush the trailing args collected after the last alias
            if args:  # trun it into instance
                if args[0] == '...':
                    if len(args) != 1:
                        raise DocpieError(
                            'Error in %s: "..." followed by non option',
                            opt)
                    repeat = True
                else:
                    this_arg = Required(
                        *self.parse_pattern(Token(args))).fix()
                    if this_arg is not None:
                        args_ins.append(this_arg)

        # option without any args
        if not args_ins:
            return opt_ins, repeat

        # in Option, there should only have one arg list
        # e.g.: -f <file> --file=FILE -> -f/--file (<file>|FILE)
        # because the arg name will now be shown, it parsed as:
        # -f <file> --file=FILE -> -f/--file (<file>)
        current_ins = args_ins.pop(0)
        current_range = current_ins.arg_range()

        # avoid e.g.: -f <a> <b> --file <c>
        for other_ins in args_ins:
            this_range = other_ins.arg_range()
            if this_range != current_range:
                raise DocpieError("%s announced differently (%s, %s)" % (
                    opt_ins, this_range, current_range))
        if len(current_range) > 1:
            logger.debug('too many possibilities: '
                         'option %s expect %s arguments',
                         name, '/'.join(map(str, current_range)))
        # TODO: check if current_ins contain Command(not allowed in fact)
        opt_ins.ref = current_ins
        return opt_ins, repeat

    def opt_str_to_list(self, opt):
        """Tokenize one announced option string, e.g.
        `-a, --all=<val>` -> ['-a', '--all', '<val>']."""
        dropped_comma_and_equal = opt.replace(',', ' ').replace('=', ' ')
        wrapped_space = self.wrap_symbol_re.sub(
            r' \1 ', dropped_comma_and_equal)
        opt_lis = [x for x in self.split_re.split(wrapped_space) if x]
        return opt_lis
|
TylerTemp/docpie
|
docpie/parser.py
|
OptionParser.parse_names_and_default
|
python
|
def parse_names_and_default(self):
    """parse for `parse_content`

    {title: [('-a, --all=STH', 'default'), ...]}"""
    result = {}
    for title, text in self.formal_content.items():
        if not text:
            result[title] = []
            continue
        logger.debug('\n' + text)
        collect = []
        to_list = text.splitlines()
        # parse first line. Should NEVER failed.
        # this will ensure in `[default: xxx]`,
        # the `xxx`(e.g: `\t`, `,`) will not be changed by _format_line
        previous_line = to_list.pop(0)
        collect.append(self.parse_line_option_indent(previous_line))
        for line in to_list:
            indent_match = self.indent_re.match(line)
            this_indent = len(indent_match.groupdict()['indent'])
            if this_indent >= collect[-1]['indent']:
                # A multi line description
                previous_line = line
                continue
            # new option line
            # deal the default for previous option
            collect[-1]['default'] = self.parse_default(previous_line)
            # deal this option
            collect.append(self.parse_line_option_indent(line))
            logger.debug(collect[-1])
            previous_line = line
        else:
            # loop ended: capture the default of the last option
            collect[-1]['default'] = self.parse_default(previous_line)
        result[title] = [
            (each['option'], each['default']) for each in collect]
    return result
|
parse for `parse_content`
{title: [('-a, --all=STH', 'default'), ...]}
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/parser.py#L478-L521
|
[
"def parse_line_option_indent(cls, line):\n opt_str, separater, description_str = \\\n cls.cut_first_spaces_outside_bracket(line)\n\n logger.debug('%(line)s -> %(opt_str)r, '\n '%(separater)r, '\n '%(description_str)r' % locals())\n if description_str.strip():\n indent = len(opt_str.expandtabs()) + len(separater.expandtabs())\n logger.debug('indent: %s', indent)\n else:\n indent = 2 + len(cls.indent_re.match(\n opt_str.expandtabs()\n ).groupdict()['indent'])\n logger.debug('indent: %s', indent)\n return {'option': opt_str.strip(), 'indent': indent}\n",
"def parse_default(cls, line):\n m = cls.default_re.search(line)\n if m is None:\n return None\n return m.groupdict()['default']\n"
] |
class OptionParser(Parser):
split_re = re.compile(r'(<.*?>)|\s+')
wrap_symbol_re = re.compile(r'([\|\[\]\(\)]|\.\.\.)')
line_re = re.compile(r'^(?P<indent> *)'
r'(?P<option>[\d\w=_, <>\-\[\]\.]+?)'
r'(?P<separater>$| $| {2,})'
r'(?P<description>.*?)'
r' *$',
flags=re.IGNORECASE)
indent_re = re.compile(r'^(?P<indent> *)')
to_space_re = re.compile(r',\s?|=')
visible_empty_line_re = re.compile(r'^\s*?\n*|\r?\n(:?[\ \t]*\r?\n)+',
flags=re.DOTALL)
option_split_re_str = (r'([^\r\n]*{0}[\ \t]*\r?\n?)')
# split_re = re.compile(r'(<.*?>)|\s?')
# default ::= chars "[default: " chars "]"
# support xxxxxx.[default: ]
# support xxxxxx.[default: yes]
# not support xxxxx[default: no].
# not support xxxxx[default: no]!
# If you want to match a not so strict format, this may help:
# default_re = re.compile(r'\[default: *(?P<default>.*?) *\]'
# r' *'
# r'[\.\?\!]? *$',
# flags=re.IGNORECASE)
default_re = re.compile(r'\[default: (?P<default>.*?)\] *$',
flags=re.IGNORECASE)
def __init__(self, option_name, case_sensitive,
stdopt, attachopt, attachvalue, namedoptions):
self.stdopt = stdopt
self.attachopt = attachopt
self.attachvalue = attachvalue
self.case_sensitive = case_sensitive
self.option_name = option_name
self.option_split_re = re.compile(
self.option_split_re_str.format(option_name),
flags=re.DOTALL if case_sensitive else (re.DOTALL | re.IGNORECASE)
)
self.raw_content = {}
self.formal_content = None
self.name_2_instance = {}
self.namedoptions = namedoptions
# if text is None or not text.strip(): # empty
# self._opt_and_default_str = []
# else:
# self._opt_and_default_str = list(self._parse_text(text))
#
# self._chain = self._parse_to_instance(self._opt_and_default_str)
def parse(self, text):
self.parse_content(text)
title_names_and_default = self.parse_names_and_default()
self.instances = self.parse_to_instance(title_names_and_default)
def parse_content(self, text):
"""parse section to formal format
raw_content: {title: section(with title)}. For `help` access.
formal_content: {title: section} but the section has been dedented
without title. For parse instance"""
raw_content = self.raw_content
raw_content.clear()
formal_collect = {}
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
split = self.visible_empty_line_re.split(text)
except ValueError: # python >= 3.5
split = [text]
option_split_re = self.option_split_re
name = re.compile(re.escape(self.option_name), re.IGNORECASE)
for text in filter(lambda x: x and x.strip(), split):
# logger.warning('get options group:\n%r', text)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
split_options = option_split_re.split(text)
except ValueError: # python >= 3.5
continue
split_options.pop(0)
for title, section in zip(split_options[::2], split_options[1::2]):
prefix, end = name.split(title)
prefix = prefix.strip()
section = section.rstrip()
if end.endswith('\n'):
formal = section
else:
formal = ' ' * len(title) + section
formal_collect.setdefault(prefix, []).append(formal)
# logger.error((title, section))
if prefix in raw_content:
# TODO: better handling way?
if self.namedoptions:
log = logger.warning
else:
log = logger.debug
log('duplicated options section %s', prefix)
raw_content[prefix] += '\n%s%s' % (title, section)
else:
raw_content[prefix] = title + section
if formal_collect:
for each_title, values in formal_collect.items():
value = '\n'.join(map(textwrap.dedent, values))
formal_collect[each_title] = value
self.formal_content = formal_collect
def parse_names_and_default(self):
"""parse for `parse_content`
{title: [('-a, --all=STH', 'default'), ...]}"""
result = {}
for title, text in self.formal_content.items():
if not text:
result[title] = []
continue
logger.debug('\n' + text)
collect = []
to_list = text.splitlines()
# parse first line. Should NEVER failed.
# this will ensure in `[default: xxx]`,
# the `xxx`(e.g: `\t`, `,`) will not be changed by _format_line
previous_line = to_list.pop(0)
collect.append(self.parse_line_option_indent(previous_line))
for line in to_list:
indent_match = self.indent_re.match(line)
this_indent = len(indent_match.groupdict()['indent'])
if this_indent >= collect[-1]['indent']:
# A multi line description
previous_line = line
continue
# new option line
# deal the default for previous option
collect[-1]['default'] = self.parse_default(previous_line)
# deal this option
collect.append(self.parse_line_option_indent(line))
logger.debug(collect[-1])
previous_line = line
else:
collect[-1]['default'] = self.parse_default(previous_line)
result[title] = [
(each['option'], each['default']) for each in collect]
return result
spaces_re = re.compile(r'(\ \ \s*|\t\s*)')
@classmethod
def cut_first_spaces_outside_bracket(cls, string):
right = cls.spaces_re.split(string)
left = []
if right and right[0] == '': # re matches the start of the string
right.pop(0)
if right and not right[0].strip(): # it is indent
left.append(right.pop(0))
brackets = {'(': 0, '[': 0, '<': 0}
close_brancket = {'(': ')', '[': ']', '<': '>'}
cutted = ''
while right:
this = right.pop(0)
for open_b in brackets:
brackets[open_b] += this.count(open_b)
brackets[open_b] -= this.count(close_brancket[open_b])
if sum(brackets.values()):
left.append(this)
elif (not this.strip() and
len(this.expandtabs()) >= 2):
cutted = this
break
else:
left.append(this)
return ''.join(left), cutted, ''.join(right)
@classmethod
def parse_line_option_indent(cls, line):
opt_str, separater, description_str = \
cls.cut_first_spaces_outside_bracket(line)
logger.debug('%(line)s -> %(opt_str)r, '
'%(separater)r, '
'%(description_str)r' % locals())
if description_str.strip():
indent = len(opt_str.expandtabs()) + len(separater.expandtabs())
logger.debug('indent: %s', indent)
else:
indent = 2 + len(cls.indent_re.match(
opt_str.expandtabs()
).groupdict()['indent'])
logger.debug('indent: %s', indent)
return {'option': opt_str.strip(), 'indent': indent}
@classmethod
def parse_default(cls, line):
m = cls.default_re.search(line)
if m is None:
return None
return m.groupdict()['default']
def parse_to_instance(self, title_of_name_and_default):
"""{title: [Option(), ...]}"""
result = {}
for title, name_and_default in title_of_name_and_default.items():
logger.debug((title, name_and_default))
result[title] = opts = []
for opt_str, default in name_and_default:
logger.debug('%s:%r' % (opt_str, default))
opt, repeat = self.parse_opt_str(opt_str)
opt.default = default
opt_ins = Optional(opt, repeat=repeat)
for name in opt.names:
self.name_2_instance[name] = opt_ins
opts.append(opt_ins)
return result
def split_short_by_cfg(self, option_str):
if self.stdopt:
if (not option_str.startswith('--') and
len(option_str) > 1):
return option_str[:2], option_str[2:]
return option_str, ''
def parse_opt_str(self, opt):
repeat = False
# -sth=<goes> ON -> -sth, <goes>, ON
opt_lis = self.opt_str_to_list(opt)
logger.debug('%r -> %s' % (opt, opt_lis))
first = opt_lis.pop(0)
if not first.startswith('-'):
raise DocpieError('option %s does not start with "-"' % first)
# if self.stdopt:
# -sth -> name=-s, value=th
# else:
# -sth -> name=-sth, value=''
name, value = self.split_short_by_cfg(first)
opt_ins = Option(name)
if value == '...':
repeat = True
# -f... <sth>
if opt_lis and not opt_lis[0].startswith('-'):
raise DocpieError(
'option "%s" has argument following "..."', opt)
elif value:
args_ins = [Required(Argument(value))]
else:
args_ins = []
if opt_lis and opt_lis[0] == '...':
repeat = True
opt_lis.pop(0)
if opt_lis and not opt_lis[0].startswith('-'):
raise DocpieError(
'option "%s" has argument following "..."', opt)
args = [] # store the current args after option
for each in opt_lis:
if each.startswith('-'): # alias
name, value = self.split_short_by_cfg(each)
opt_ins.names.add(name)
if value:
args_ins.append(Required(Argument(value)))
if args: # trun it into instance
if args[0] == '...':
if len(args) != 1:
raise DocpieError(
'Error in %s: "..." followed by non option',
opt)
repeat = True
else:
this_arg = Required(
*self.parse_pattern(Token(args))
).fix()
if this_arg is not None:
args_ins.append(this_arg)
del args[:]
else:
args.append(each)
else:
if args: # trun it into instance
if args[0] == '...':
if len(args) != 1:
raise DocpieError(
'Error in %s: "..." followed by non option',
opt)
repeat = True
else:
this_arg = Required(
*self.parse_pattern(Token(args))).fix()
if this_arg is not None:
args_ins.append(this_arg)
# option without any args
if not args_ins:
return opt_ins, repeat
# in Option, there should only have one arg list
# e.g.: -f <file> --file=FILE -> -f/--file (<file>|FILE)
# because the arg name will now be shown, it parsed as:
# -f <file> --file=FILE -> -f/--file (<file>)
current_ins = args_ins.pop(0)
current_range = current_ins.arg_range()
# avoid e.g.: -f <a> <b> --file <c>
for other_ins in args_ins:
this_range = other_ins.arg_range()
if this_range != current_range:
raise DocpieError("%s announced differently (%s, %s)" % (
opt_ins, this_range, current_range))
if len(current_range) > 1:
logger.debug('too many possibilities: '
'option %s expect %s arguments',
name, '/'.join(map(str, current_range)))
# TODO: check if current_ins contain Command(not allowed in fact)
opt_ins.ref = current_ins
return opt_ins, repeat
def opt_str_to_list(self, opt):
dropped_comma_and_equal = opt.replace(',', ' ').replace('=', ' ')
wrapped_space = self.wrap_symbol_re.sub(
r' \1 ', dropped_comma_and_equal)
opt_lis = [x for x in self.split_re.split(wrapped_space) if x]
return opt_lis
|
TylerTemp/docpie
|
docpie/parser.py
|
OptionParser.parse_to_instance
|
python
|
def parse_to_instance(self, title_of_name_and_default):
"""{title: [Option(), ...]}"""
result = {}
for title, name_and_default in title_of_name_and_default.items():
logger.debug((title, name_and_default))
result[title] = opts = []
for opt_str, default in name_and_default:
logger.debug('%s:%r' % (opt_str, default))
opt, repeat = self.parse_opt_str(opt_str)
opt.default = default
opt_ins = Optional(opt, repeat=repeat)
for name in opt.names:
self.name_2_instance[name] = opt_ins
opts.append(opt_ins)
return result
|
{title: [Option(), ...]}
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/parser.py#L577-L592
|
[
"def parse_opt_str(self, opt):\n\n repeat = False\n\n # -sth=<goes> ON -> -sth, <goes>, ON\n opt_lis = self.opt_str_to_list(opt)\n logger.debug('%r -> %s' % (opt, opt_lis))\n\n first = opt_lis.pop(0)\n if not first.startswith('-'):\n raise DocpieError('option %s does not start with \"-\"' % first)\n\n # if self.stdopt:\n # -sth -> name=-s, value=th\n # else:\n # -sth -> name=-sth, value=''\n name, value = self.split_short_by_cfg(first)\n opt_ins = Option(name)\n if value == '...':\n repeat = True\n # -f... <sth>\n if opt_lis and not opt_lis[0].startswith('-'):\n raise DocpieError(\n 'option \"%s\" has argument following \"...\"', opt)\n elif value:\n args_ins = [Required(Argument(value))]\n else:\n args_ins = []\n\n if opt_lis and opt_lis[0] == '...':\n repeat = True\n opt_lis.pop(0)\n if opt_lis and not opt_lis[0].startswith('-'):\n raise DocpieError(\n 'option \"%s\" has argument following \"...\"', opt)\n\n args = [] # store the current args after option\n for each in opt_lis:\n if each.startswith('-'): # alias\n name, value = self.split_short_by_cfg(each)\n opt_ins.names.add(name)\n if value:\n args_ins.append(Required(Argument(value)))\n if args: # trun it into instance\n if args[0] == '...':\n if len(args) != 1:\n raise DocpieError(\n 'Error in %s: \"...\" followed by non option',\n opt)\n repeat = True\n else:\n this_arg = Required(\n *self.parse_pattern(Token(args))\n ).fix()\n if this_arg is not None:\n args_ins.append(this_arg)\n del args[:]\n else:\n args.append(each)\n else:\n if args: # trun it into instance\n if args[0] == '...':\n if len(args) != 1:\n raise DocpieError(\n 'Error in %s: \"...\" followed by non option',\n opt)\n repeat = True\n else:\n this_arg = Required(\n *self.parse_pattern(Token(args))).fix()\n if this_arg is not None:\n args_ins.append(this_arg)\n\n # option without any args\n if not args_ins:\n return opt_ins, repeat\n\n # in Option, there should only have one arg list\n # e.g.: -f <file> --file=FILE -> -f/--file (<file>|FILE)\n 
# because the arg name will now be shown, it parsed as:\n # -f <file> --file=FILE -> -f/--file (<file>)\n current_ins = args_ins.pop(0)\n current_range = current_ins.arg_range()\n\n # avoid e.g.: -f <a> <b> --file <c>\n for other_ins in args_ins:\n this_range = other_ins.arg_range()\n if this_range != current_range:\n raise DocpieError(\"%s announced differently (%s, %s)\" % (\n opt_ins, this_range, current_range))\n\n if len(current_range) > 1:\n logger.debug('too many possibilities: '\n 'option %s expect %s arguments',\n name, '/'.join(map(str, current_range)))\n\n # TODO: check if current_ins contain Command(not allowed in fact)\n opt_ins.ref = current_ins\n return opt_ins, repeat\n"
] |
class OptionParser(Parser):
split_re = re.compile(r'(<.*?>)|\s+')
wrap_symbol_re = re.compile(r'([\|\[\]\(\)]|\.\.\.)')
line_re = re.compile(r'^(?P<indent> *)'
r'(?P<option>[\d\w=_, <>\-\[\]\.]+?)'
r'(?P<separater>$| $| {2,})'
r'(?P<description>.*?)'
r' *$',
flags=re.IGNORECASE)
indent_re = re.compile(r'^(?P<indent> *)')
to_space_re = re.compile(r',\s?|=')
visible_empty_line_re = re.compile(r'^\s*?\n*|\r?\n(:?[\ \t]*\r?\n)+',
flags=re.DOTALL)
option_split_re_str = (r'([^\r\n]*{0}[\ \t]*\r?\n?)')
# split_re = re.compile(r'(<.*?>)|\s?')
# default ::= chars "[default: " chars "]"
# support xxxxxx.[default: ]
# support xxxxxx.[default: yes]
# not support xxxxx[default: no].
# not support xxxxx[default: no]!
# If you want to match a not so strict format, this may help:
# default_re = re.compile(r'\[default: *(?P<default>.*?) *\]'
# r' *'
# r'[\.\?\!]? *$',
# flags=re.IGNORECASE)
default_re = re.compile(r'\[default: (?P<default>.*?)\] *$',
flags=re.IGNORECASE)
def __init__(self, option_name, case_sensitive,
stdopt, attachopt, attachvalue, namedoptions):
self.stdopt = stdopt
self.attachopt = attachopt
self.attachvalue = attachvalue
self.case_sensitive = case_sensitive
self.option_name = option_name
self.option_split_re = re.compile(
self.option_split_re_str.format(option_name),
flags=re.DOTALL if case_sensitive else (re.DOTALL | re.IGNORECASE)
)
self.raw_content = {}
self.formal_content = None
self.name_2_instance = {}
self.namedoptions = namedoptions
# if text is None or not text.strip(): # empty
# self._opt_and_default_str = []
# else:
# self._opt_and_default_str = list(self._parse_text(text))
#
# self._chain = self._parse_to_instance(self._opt_and_default_str)
def parse(self, text):
self.parse_content(text)
title_names_and_default = self.parse_names_and_default()
self.instances = self.parse_to_instance(title_names_and_default)
def parse_content(self, text):
"""parse section to formal format
raw_content: {title: section(with title)}. For `help` access.
formal_content: {title: section} but the section has been dedented
without title. For parse instance"""
raw_content = self.raw_content
raw_content.clear()
formal_collect = {}
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
split = self.visible_empty_line_re.split(text)
except ValueError: # python >= 3.5
split = [text]
option_split_re = self.option_split_re
name = re.compile(re.escape(self.option_name), re.IGNORECASE)
for text in filter(lambda x: x and x.strip(), split):
# logger.warning('get options group:\n%r', text)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
split_options = option_split_re.split(text)
except ValueError: # python >= 3.5
continue
split_options.pop(0)
for title, section in zip(split_options[::2], split_options[1::2]):
prefix, end = name.split(title)
prefix = prefix.strip()
section = section.rstrip()
if end.endswith('\n'):
formal = section
else:
formal = ' ' * len(title) + section
formal_collect.setdefault(prefix, []).append(formal)
# logger.error((title, section))
if prefix in raw_content:
# TODO: better handling way?
if self.namedoptions:
log = logger.warning
else:
log = logger.debug
log('duplicated options section %s', prefix)
raw_content[prefix] += '\n%s%s' % (title, section)
else:
raw_content[prefix] = title + section
if formal_collect:
for each_title, values in formal_collect.items():
value = '\n'.join(map(textwrap.dedent, values))
formal_collect[each_title] = value
self.formal_content = formal_collect
def parse_names_and_default(self):
"""parse for `parse_content`
{title: [('-a, --all=STH', 'default'), ...]}"""
result = {}
for title, text in self.formal_content.items():
if not text:
result[title] = []
continue
logger.debug('\n' + text)
collect = []
to_list = text.splitlines()
# parse first line. Should NEVER failed.
# this will ensure in `[default: xxx]`,
# the `xxx`(e.g: `\t`, `,`) will not be changed by _format_line
previous_line = to_list.pop(0)
collect.append(self.parse_line_option_indent(previous_line))
for line in to_list:
indent_match = self.indent_re.match(line)
this_indent = len(indent_match.groupdict()['indent'])
if this_indent >= collect[-1]['indent']:
# A multi line description
previous_line = line
continue
# new option line
# deal the default for previous option
collect[-1]['default'] = self.parse_default(previous_line)
# deal this option
collect.append(self.parse_line_option_indent(line))
logger.debug(collect[-1])
previous_line = line
else:
collect[-1]['default'] = self.parse_default(previous_line)
result[title] = [
(each['option'], each['default']) for each in collect]
return result
spaces_re = re.compile(r'(\ \ \s*|\t\s*)')
@classmethod
def cut_first_spaces_outside_bracket(cls, string):
right = cls.spaces_re.split(string)
left = []
if right and right[0] == '': # re matches the start of the string
right.pop(0)
if right and not right[0].strip(): # it is indent
left.append(right.pop(0))
brackets = {'(': 0, '[': 0, '<': 0}
close_brancket = {'(': ')', '[': ']', '<': '>'}
cutted = ''
while right:
this = right.pop(0)
for open_b in brackets:
brackets[open_b] += this.count(open_b)
brackets[open_b] -= this.count(close_brancket[open_b])
if sum(brackets.values()):
left.append(this)
elif (not this.strip() and
len(this.expandtabs()) >= 2):
cutted = this
break
else:
left.append(this)
return ''.join(left), cutted, ''.join(right)
@classmethod
def parse_line_option_indent(cls, line):
opt_str, separater, description_str = \
cls.cut_first_spaces_outside_bracket(line)
logger.debug('%(line)s -> %(opt_str)r, '
'%(separater)r, '
'%(description_str)r' % locals())
if description_str.strip():
indent = len(opt_str.expandtabs()) + len(separater.expandtabs())
logger.debug('indent: %s', indent)
else:
indent = 2 + len(cls.indent_re.match(
opt_str.expandtabs()
).groupdict()['indent'])
logger.debug('indent: %s', indent)
return {'option': opt_str.strip(), 'indent': indent}
@classmethod
def parse_default(cls, line):
m = cls.default_re.search(line)
if m is None:
return None
return m.groupdict()['default']
def parse_to_instance(self, title_of_name_and_default):
"""{title: [Option(), ...]}"""
result = {}
for title, name_and_default in title_of_name_and_default.items():
logger.debug((title, name_and_default))
result[title] = opts = []
for opt_str, default in name_and_default:
logger.debug('%s:%r' % (opt_str, default))
opt, repeat = self.parse_opt_str(opt_str)
opt.default = default
opt_ins = Optional(opt, repeat=repeat)
for name in opt.names:
self.name_2_instance[name] = opt_ins
opts.append(opt_ins)
return result
def split_short_by_cfg(self, option_str):
if self.stdopt:
if (not option_str.startswith('--') and
len(option_str) > 1):
return option_str[:2], option_str[2:]
return option_str, ''
def parse_opt_str(self, opt):
repeat = False
# -sth=<goes> ON -> -sth, <goes>, ON
opt_lis = self.opt_str_to_list(opt)
logger.debug('%r -> %s' % (opt, opt_lis))
first = opt_lis.pop(0)
if not first.startswith('-'):
raise DocpieError('option %s does not start with "-"' % first)
# if self.stdopt:
# -sth -> name=-s, value=th
# else:
# -sth -> name=-sth, value=''
name, value = self.split_short_by_cfg(first)
opt_ins = Option(name)
if value == '...':
repeat = True
# -f... <sth>
if opt_lis and not opt_lis[0].startswith('-'):
raise DocpieError(
'option "%s" has argument following "..."', opt)
elif value:
args_ins = [Required(Argument(value))]
else:
args_ins = []
if opt_lis and opt_lis[0] == '...':
repeat = True
opt_lis.pop(0)
if opt_lis and not opt_lis[0].startswith('-'):
raise DocpieError(
'option "%s" has argument following "..."', opt)
args = [] # store the current args after option
for each in opt_lis:
if each.startswith('-'): # alias
name, value = self.split_short_by_cfg(each)
opt_ins.names.add(name)
if value:
args_ins.append(Required(Argument(value)))
if args: # trun it into instance
if args[0] == '...':
if len(args) != 1:
raise DocpieError(
'Error in %s: "..." followed by non option',
opt)
repeat = True
else:
this_arg = Required(
*self.parse_pattern(Token(args))
).fix()
if this_arg is not None:
args_ins.append(this_arg)
del args[:]
else:
args.append(each)
else:
if args: # trun it into instance
if args[0] == '...':
if len(args) != 1:
raise DocpieError(
'Error in %s: "..." followed by non option',
opt)
repeat = True
else:
this_arg = Required(
*self.parse_pattern(Token(args))).fix()
if this_arg is not None:
args_ins.append(this_arg)
# option without any args
if not args_ins:
return opt_ins, repeat
# in Option, there should only have one arg list
# e.g.: -f <file> --file=FILE -> -f/--file (<file>|FILE)
# because the arg name will now be shown, it parsed as:
# -f <file> --file=FILE -> -f/--file (<file>)
current_ins = args_ins.pop(0)
current_range = current_ins.arg_range()
# avoid e.g.: -f <a> <b> --file <c>
for other_ins in args_ins:
this_range = other_ins.arg_range()
if this_range != current_range:
raise DocpieError("%s announced differently (%s, %s)" % (
opt_ins, this_range, current_range))
if len(current_range) > 1:
logger.debug('too many possibilities: '
'option %s expect %s arguments',
name, '/'.join(map(str, current_range)))
# TODO: check if current_ins contain Command(not allowed in fact)
opt_ins.ref = current_ins
return opt_ins, repeat
def opt_str_to_list(self, opt):
dropped_comma_and_equal = opt.replace(',', ' ').replace('=', ' ')
wrapped_space = self.wrap_symbol_re.sub(
r' \1 ', dropped_comma_and_equal)
opt_lis = [x for x in self.split_re.split(wrapped_space) if x]
return opt_lis
|
TylerTemp/docpie
|
docpie/parser.py
|
UsageParser.set_option_name_2_instance
|
python
|
def set_option_name_2_instance(self, options):
"""{title: {'-a': Option(), '--all': Option()}}"""
title_opt_2_ins = self.titled_opt_to_ins
title_opt_2_ins.clear()
for title, opts in options.items():
title_opt_2_ins[title] = opt_2_ins = {}
for each in opts:
opt_ins = each[0] # get Option inside Optional/Required
for name in opt_ins.names:
opt_2_ins[name] = each
|
{title: {'-a': Option(), '--all': Option()}}
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/parser.py#L761-L770
| null |
class UsageParser(Parser):
angle_bracket_re = re.compile(r'(<.*?>)')
wrap_symbol_re = re.compile(
r'(\[[^\]\s]*?options\]|\.\.\.|\||\[|\]|\(|\))'
)
split_re = re.compile(r'(\[[^\]\s]*?options\]|\S*<.*?>\S*)|\s+')
# will match '-', '--', and
# flag ::= "-" [ "-" ] chars "=<" chars ">"
# it will also match '---flag', so use startswith('---') to check
std_flag_eq_arg_re = re.compile(r'(?P<flag>^-{1,2}[\da-zA-Z_\-]*)'
r'='
r'(?P<arg><.*?>)'
r'$')
usage_re_str = (r'(?:^|\n)'
r'(?P<raw>'
r'(?P<name>[\ \t]*{0}[\ \t]*)'
r'(?P<sep>(\r?\n)?)'
r'(?P<section>.*?)'
r')'
r'\s*'
r'(?:\n\s*\n|\n\s*$|$)')
def __init__(self, usage_name, case_sensitive,
stdopt, attachopt, attachvalue, namedoptions):
self.usage_name = re.escape(usage_name)
self.case_sensitive = case_sensitive
self.stdopt = stdopt
self.attachopt = attachopt
self.attachvalue = attachvalue
self.titled_opt_to_ins = {}
self.options = None
self.raw_content = None
self.formal_content = None
self.instances = None
self.all_options = None
self.namedoptions = namedoptions
# self._chain = self._parse_text(text, name)
def parse(self, text, name, options):
self.options = options
self.set_option_name_2_instance(options)
if text is not None:
self.parse_content(text)
if self.formal_content is None:
raise DocpieError('"Usage:" not found')
self.parse_2_instance(name)
self.fix_option_and_empty()
def set_option_name_2_instance(self, options):
"""{title: {'-a': Option(), '--all': Option()}}"""
title_opt_2_ins = self.titled_opt_to_ins
title_opt_2_ins.clear()
for title, opts in options.items():
title_opt_2_ins[title] = opt_2_ins = {}
for each in opts:
opt_ins = each[0] # get Option inside Optional/Required
for name in opt_ins.names:
opt_2_ins[name] = each
def parse_content(self, text):
"""get Usage section and set to `raw_content`, `formal_content` of no
title and empty-line version"""
match = re.search(
self.usage_re_str.format(self.usage_name),
text,
flags=(re.DOTALL
if self.case_sensitive
else (re.DOTALL | re.IGNORECASE)))
if match is None:
return
dic = match.groupdict()
logger.debug(dic)
self.raw_content = dic['raw']
if dic['sep'] in ('\n', '\r\n'):
self.formal_content = dic['section']
return
reallen = len(dic['name'])
replace = ''.ljust(reallen)
drop_name = match.expand('%s\\g<sep>\\g<section>' % replace)
self.formal_content = self.drop_started_empty_lines(drop_name).rstrip()
def parse_2_instance(self, name):
result = []
for each_line in self.split_line_by_indent(self.formal_content):
raw_str_lis = self.parse_line_to_lis(each_line, name)
chain = self.parse_pattern(Token(raw_str_lis))
result.append(chain)
self.instances = result
indent_re = re.compile(r'^ *')
def split_line_by_indent(self, text):
lines = text.splitlines()
if len(lines) == 1:
yield lines[0]
return
first_line = lines.pop(0)
line_to_join = [first_line]
indent = len(
self.indent_re.match(first_line.expandtabs()).group())
while lines:
this_line = lines.pop(0)
this_indent = len(
self.indent_re.match(this_line.expandtabs()).group())
if this_indent > indent:
line_to_join.append(this_line)
else:
yield ''.join(line_to_join)
line_to_join[:] = (this_line,)
indent = len(
self.indent_re.match(this_line.expandtabs()).group())
else:
yield ' '.join(line_to_join)
def parse_line_to_lis(self, line, name=None):
if name is not None:
_, find_name, line = line.partition(name)
if not find_name:
raise DocpieError(
'%s is not in usage pattern %s' % (name, _))
# wrapped_space = self.wrap_symbol_re.sub(r' \1 ', line.strip())
# logger.debug(wrapped_space)
# result = [x for x in self.split_re.split(wrapped_space) if x]
angle_bracket_re = self.angle_bracket_re
wrap_symbol_re = self.wrap_symbol_re
with warnings.catch_warnings():
warnings.simplefilter('ignore')
try:
sep_by_angle = angle_bracket_re.split(line)
except ValueError:
sep_by_angle = [line]
wrap_space = []
for index, each_block in enumerate(sep_by_angle):
if index % 2:
wrap_space.append(each_block)
continue
if not each_block:
continue
warped_space = wrap_symbol_re.sub(r' \1 ', each_block)
wrap_space.append(warped_space)
wraped = ''.join(wrap_space)
with warnings.catch_warnings():
warnings.simplefilter('ignore')
try:
sep = self.split_re.split(wraped)
except ValueError:
sep = [wraped]
result = list(filter(None, sep))
# drop name
if name is None:
result.pop(0)
return result
@classmethod
def find_optionshortcut_and_outside_option_names(cls, lis):
opt_ouside = []
opt_cuts = []
for element in lis:
if isinstance(element, OptionsShortcut):
opt_cuts.append(element)
elif isinstance(element, list):
outside, srtcuts = \
cls.find_optionshortcut_and_outside_option_names(element)
opt_ouside.extend(outside)
opt_cuts.extend(srtcuts)
elif isinstance(element, Option):
opt_ouside.append(element)
return opt_ouside, opt_cuts
def fix_option_and_empty(self):
result = []
all_options = []
logger.debug(self.titled_opt_to_ins)
for options in self.titled_opt_to_ins.values():
for each_potion in options.values():
all_options.append(each_potion[0])
for each_usage in self.instances:
ins = Required(*each_usage).fix()
if ins is None:
result.append(Optional())
continue
outside_opts, opt_shortcuts = \
self.find_optionshortcut_and_outside_option_names(ins)
logger.debug(outside_opts)
for opt_cut in opt_shortcuts:
for opt_ins in outside_opts:
opt_cut.set_hide(opt_ins.names)
all_options.extend(outside_opts)
for usage in ins.expand():
usage.push_option_ahead()
# [options] -a
# Options: -a
# TODO: better solution
r = usage.fix()
if r is None:
result.append(Optional())
else:
result.append(r)
self.instances = result
self.all_options = all_options
|
TylerTemp/docpie
|
docpie/parser.py
|
UsageParser.parse_content
|
python
|
def parse_content(self, text):
"""get Usage section and set to `raw_content`, `formal_content` of no
title and empty-line version"""
match = re.search(
self.usage_re_str.format(self.usage_name),
text,
flags=(re.DOTALL
if self.case_sensitive
else (re.DOTALL | re.IGNORECASE)))
if match is None:
return
dic = match.groupdict()
logger.debug(dic)
self.raw_content = dic['raw']
if dic['sep'] in ('\n', '\r\n'):
self.formal_content = dic['section']
return
reallen = len(dic['name'])
replace = ''.ljust(reallen)
drop_name = match.expand('%s\\g<sep>\\g<section>' % replace)
self.formal_content = self.drop_started_empty_lines(drop_name).rstrip()
|
get Usage section and set to `raw_content`, `formal_content` of no
title and empty-line version
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/parser.py#L772-L795
|
[
"def drop_started_empty_lines(cls, text):\n # drop the empty lines at start\n # different from lstrip\n logger.debug(repr(text))\n m = cls.started_empty_lines.match(text)\n if m is None:\n return text\n return m.groupdict()['rest']\n"
] |
class UsageParser(Parser):
angle_bracket_re = re.compile(r'(<.*?>)')
wrap_symbol_re = re.compile(
r'(\[[^\]\s]*?options\]|\.\.\.|\||\[|\]|\(|\))'
)
split_re = re.compile(r'(\[[^\]\s]*?options\]|\S*<.*?>\S*)|\s+')
# will match '-', '--', and
# flag ::= "-" [ "-" ] chars "=<" chars ">"
# it will also match '---flag', so use startswith('---') to check
std_flag_eq_arg_re = re.compile(r'(?P<flag>^-{1,2}[\da-zA-Z_\-]*)'
r'='
r'(?P<arg><.*?>)'
r'$')
usage_re_str = (r'(?:^|\n)'
r'(?P<raw>'
r'(?P<name>[\ \t]*{0}[\ \t]*)'
r'(?P<sep>(\r?\n)?)'
r'(?P<section>.*?)'
r')'
r'\s*'
r'(?:\n\s*\n|\n\s*$|$)')
def __init__(self, usage_name, case_sensitive,
stdopt, attachopt, attachvalue, namedoptions):
self.usage_name = re.escape(usage_name)
self.case_sensitive = case_sensitive
self.stdopt = stdopt
self.attachopt = attachopt
self.attachvalue = attachvalue
self.titled_opt_to_ins = {}
self.options = None
self.raw_content = None
self.formal_content = None
self.instances = None
self.all_options = None
self.namedoptions = namedoptions
# self._chain = self._parse_text(text, name)
def parse(self, text, name, options):
self.options = options
self.set_option_name_2_instance(options)
if text is not None:
self.parse_content(text)
if self.formal_content is None:
raise DocpieError('"Usage:" not found')
self.parse_2_instance(name)
self.fix_option_and_empty()
def set_option_name_2_instance(self, options):
"""{title: {'-a': Option(), '--all': Option()}}"""
title_opt_2_ins = self.titled_opt_to_ins
title_opt_2_ins.clear()
for title, opts in options.items():
title_opt_2_ins[title] = opt_2_ins = {}
for each in opts:
opt_ins = each[0] # get Option inside Optional/Required
for name in opt_ins.names:
opt_2_ins[name] = each
def parse_content(self, text):
"""get Usage section and set to `raw_content`, `formal_content` of no
title and empty-line version"""
match = re.search(
self.usage_re_str.format(self.usage_name),
text,
flags=(re.DOTALL
if self.case_sensitive
else (re.DOTALL | re.IGNORECASE)))
if match is None:
return
dic = match.groupdict()
logger.debug(dic)
self.raw_content = dic['raw']
if dic['sep'] in ('\n', '\r\n'):
self.formal_content = dic['section']
return
reallen = len(dic['name'])
replace = ''.ljust(reallen)
drop_name = match.expand('%s\\g<sep>\\g<section>' % replace)
self.formal_content = self.drop_started_empty_lines(drop_name).rstrip()
def parse_2_instance(self, name):
result = []
for each_line in self.split_line_by_indent(self.formal_content):
raw_str_lis = self.parse_line_to_lis(each_line, name)
chain = self.parse_pattern(Token(raw_str_lis))
result.append(chain)
self.instances = result
indent_re = re.compile(r'^ *')
def split_line_by_indent(self, text):
lines = text.splitlines()
if len(lines) == 1:
yield lines[0]
return
first_line = lines.pop(0)
line_to_join = [first_line]
indent = len(
self.indent_re.match(first_line.expandtabs()).group())
while lines:
this_line = lines.pop(0)
this_indent = len(
self.indent_re.match(this_line.expandtabs()).group())
if this_indent > indent:
line_to_join.append(this_line)
else:
yield ''.join(line_to_join)
line_to_join[:] = (this_line,)
indent = len(
self.indent_re.match(this_line.expandtabs()).group())
else:
yield ' '.join(line_to_join)
def parse_line_to_lis(self, line, name=None):
if name is not None:
_, find_name, line = line.partition(name)
if not find_name:
raise DocpieError(
'%s is not in usage pattern %s' % (name, _))
# wrapped_space = self.wrap_symbol_re.sub(r' \1 ', line.strip())
# logger.debug(wrapped_space)
# result = [x for x in self.split_re.split(wrapped_space) if x]
angle_bracket_re = self.angle_bracket_re
wrap_symbol_re = self.wrap_symbol_re
with warnings.catch_warnings():
warnings.simplefilter('ignore')
try:
sep_by_angle = angle_bracket_re.split(line)
except ValueError:
sep_by_angle = [line]
wrap_space = []
for index, each_block in enumerate(sep_by_angle):
if index % 2:
wrap_space.append(each_block)
continue
if not each_block:
continue
warped_space = wrap_symbol_re.sub(r' \1 ', each_block)
wrap_space.append(warped_space)
wraped = ''.join(wrap_space)
with warnings.catch_warnings():
warnings.simplefilter('ignore')
try:
sep = self.split_re.split(wraped)
except ValueError:
sep = [wraped]
result = list(filter(None, sep))
# drop name
if name is None:
result.pop(0)
return result
@classmethod
def find_optionshortcut_and_outside_option_names(cls, lis):
opt_ouside = []
opt_cuts = []
for element in lis:
if isinstance(element, OptionsShortcut):
opt_cuts.append(element)
elif isinstance(element, list):
outside, srtcuts = \
cls.find_optionshortcut_and_outside_option_names(element)
opt_ouside.extend(outside)
opt_cuts.extend(srtcuts)
elif isinstance(element, Option):
opt_ouside.append(element)
return opt_ouside, opt_cuts
def fix_option_and_empty(self):
result = []
all_options = []
logger.debug(self.titled_opt_to_ins)
for options in self.titled_opt_to_ins.values():
for each_potion in options.values():
all_options.append(each_potion[0])
for each_usage in self.instances:
ins = Required(*each_usage).fix()
if ins is None:
result.append(Optional())
continue
outside_opts, opt_shortcuts = \
self.find_optionshortcut_and_outside_option_names(ins)
logger.debug(outside_opts)
for opt_cut in opt_shortcuts:
for opt_ins in outside_opts:
opt_cut.set_hide(opt_ins.names)
all_options.extend(outside_opts)
for usage in ins.expand():
usage.push_option_ahead()
# [options] -a
# Options: -a
# TODO: better solution
r = usage.fix()
if r is None:
result.append(Optional())
else:
result.append(r)
self.instances = result
self.all_options = all_options
|
TylerTemp/docpie
|
docpie/__init__.py
|
docpie
|
python
|
def docpie(doc, argv=None, help=True, version=None,
stdopt=True, attachopt=True, attachvalue=True,
helpstyle='python',
auto2dashes=True, name=None, case_sensitive=False,
optionsfirst=False, appearedonly=False, namedoptions=False,
extra=None):
if case_sensitive:
warnings.warn('`case_sensitive` is deprecated, `docpie` is always '
'case insensitive')
kwargs = locals()
argv = kwargs.pop('argv')
pie = Docpie(**kwargs)
pie.docpie(argv)
return pie
|
Parse `argv` based on command-line interface described in `doc`.
`docpie` creates your command-line interface based on its
description that you pass as `doc`. Such description can contain
--options, <positional-argument>, commands, which could be
[optional], (required), (mutually | exclusive) or repeated...
Parameters
----------
doc : str
Description of your command-line interface.
argv : list of str, optional
Argument vector to be parsed. sys.argv is used if not
provided.
help : bool (default: True)
Set to False to disable automatic help on -h or --help
options.
version : any object but None
If passed, the object will be printed if --version is in
`argv`.
stdopt : bool (default: True)
When it's True, long flag should only starts with --
attachopt: bool (default: True)
write/pass several short flag into one, e.g. -abc can mean -a -b -c.
This only works when stdopt=True
attachvalue: bool (default: True)
allow you to write short flag and its value together,
e.g. -abc can mean -a bc
auto2dashes: bool (default: True)
automaticly handle -- (which means "end of command line flag")
name: str (default: None)
the "name" of your program. In each of your "usage" the "name" will be
ignored. By default docpie will ignore the first element of your
"usage".
case_sensitive: bool (deprecated / default: False)
specifies if it need case sensitive when matching
"Usage:" and "Options:"
optionsfirst: bool (default: False)
everything after first positional argument will be interpreted as
positional argument
appearedonly: bool (default: False)
when set True, the options that never appear in argv will not
be put in result. Note this only affect options
extra: dict
customize pre-handled options. See
http://docpie.comes.today/document/advanced-apis/
for more infomation.
Returns
-------
args : dict
A dictionary, where keys are names of command-line elements
such as e.g. "--verbose" and "<path>", and values are the
parsed values of those elements.
Example
-------
>>> from docpie import docpie
>>> doc = '''
... Usage:
... my_program tcp <host> <port> [--timeout=<seconds>]
... my_program serial <port> [--baud=<n>] [--timeout=<seconds>]
... my_program (-h | --help | --version)
...
... Options:
... -h, --help Show this screen and exit.
... --baud=<n> Baudrate [default: 9600]
... '''
>>> argv = ['my_program', 'tcp', '127.0.0.1', '80', '--timeout', '30']
>>> docpie(doc, argv)
{
'--': False,
'-h': False,
'--baud': '9600',
'--help': False,
'--timeout': '30',
'--version': False,
'<host>': '127.0.0.1',
'<port>': '80',
'serial': False,
'tcp': True}
See also
--------
* Full documentation is available in README.md as well as online
at http://docpie.comes.today/document/quick-start/
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/__init__.py#L34-L133
|
[
"def docpie(self, argv=None):\n \"\"\"match the argv for each usages, return dict.\n\n if argv is None, it will use sys.argv instead.\n if argv is str, it will call argv.split() first.\n this function will check the options in self.extra and handle it first.\n Which means it may not try to match any usages because of the checking.\n \"\"\"\n\n token = self._prepare_token(argv)\n # check first, raise after\n # so `-hwhatever` can trigger `-h` first\n self.check_flag_and_handler(token)\n\n if token.error is not None:\n # raise DocpieExit('%s\\n\\n%s' % (token.error, help_msg))\n self.exception_handler(token.error)\n\n try:\n result, dashed = self._match(token)\n except DocpieExit as e:\n self.exception_handler(e)\n\n # if error is not None:\n # self.exception_handler(error)\n\n value = result.get_value(self.appeared_only, False)\n self.clear()\n self.update(value)\n if self.appeared_only:\n self._drop_non_appeared()\n\n logger.debug('get all matched value %s', self)\n rest = list(self.usages) # a copy\n rest.remove(result)\n self._add_rest_value(rest)\n logger.debug('merged rest values, now %s', self)\n self._add_option_value()\n self._dashes_value(dashed)\n\n return dict(self) # remove all other reference in this instance\n"
] |
"""
An easy and Pythonic command-line interface parser.
* http://docpie.comes.today
* Repository and issue-tracker: https://github.com/TylerTemp/docpie
* Licensed under terms of MIT license (see LICENSE)
* Copyright (c) 2015-2016 TylerTemp, tylertempdev@gmail.com
"""
from docpie.pie import Docpie
from docpie.error import DocpieException, DocpieExit, DocpieError, \
UnknownOptionExit, ExceptNoArgumentExit, \
ExpectArgumentExit, \
ExpectArgumentHitDoubleDashesExit, \
AmbiguousPrefixExit
from logging import getLogger
import warnings
__all__ = ['docpie', 'Docpie',
'DocpieException', 'DocpieExit', 'DocpieError',
'UnknownOptionExit', 'ExceptNoArgumentExit',
'ExpectArgumentExit', 'ExpectArgumentHitDoubleDashesExit',
'AmbiguousPrefixExit',
'logger']
# it's not a good idea but it can avoid loop importing
__version__ = Docpie._version
__timestamp__ = 1553178291.793742 # last sumbit
logger = getLogger('docpie')
if __name__ == '__main__':
doc = """Naval Fate.
Usage:
naval_fate.py ship new <name>...
naval_fate.py ship <name> move <x> <y> [--speed=<kn>]
naval_fate.py ship shoot <x> <y>
naval_fate.py mine (set|remove) <x> <y> [--moored | --drifting]
naval_fate.py (-h | --help)
naval_fate.py --version
Options:
-h --help Show this screen.
--version Show version.
--speed=<kn> Speed in knots [default: 10].
--moored Moored (anchored) mine.
--drifting Drifting mine.
"""
arguments = docpie(doc, version='Naval Fate 2.0')
print(arguments)
|
TylerTemp/docpie
|
docpie/tracemore.py
|
print_exc_plus
|
python
|
def print_exc_plus(stream=sys.stdout):
'''print normal traceback information with some local arg values'''
# code of this mothod is mainly from <Python Cookbook>
write = stream.write # assert the mothod exists
flush = stream.flush
tp, value, tb = sys.exc_info()
while tb.tb_next:
tb = tb.tb_next
stack = list()
f = tb.tb_frame
while f:
stack.append(f)
f = f.f_back
stack.reverse()
try:
traceback.print_exc(None, stream)
except BaseException as e:
write(u("FAILED PRINTING TRACE\n\n"))
write(u(str(value)))
write(u('\n\n'))
finally:
flush()
write(u('Locals by frame, innermost last\n'))
for frame in stack:
write(u('\nFrame %s in %s at line %s\n' % (frame.f_code.co_name,
frame.f_code.co_filename,
frame.f_lineno)))
for key, value, in frame.f_locals.items():
write(u('\t%20s = ' % key))
try:
write(u('%s\n' % value))
except BaseException:
write(u('<ERROR WHILE PRINTING VALUE>\n'))
flush()
|
print normal traceback information with some local arg values
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/tracemore.py#L22-L56
|
[
"def u(strs):\n return strs\n"
] |
import sys
import traceback
import logging
try:
from io import StringIO
except ImportError:
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
if sys.version_info[0] < 3:
def u(strs):
return strs.decode('utf-8')
else:
def u(strs):
return strs
_tmp_stream = StringIO()
def get_exc_plus():
_tmp_stream.seek(0)
_tmp_stream.truncate()
print_exc_plus(_tmp_stream)
_tmp_stream.seek(0)
return _tmp_stream.read()
if __name__ == '__main__':
def zero_error():
local_1 = range(5)
local_2 = 'a local arg'
1/0
stream = StringIO()
try:
zero_error()
except Exception:
print_exc_plus(stream)
print('Err from stream:')
print(stream.getvalue())
stream.close()
try:
zero_error()
except Exception:
print('Err from get_exc_plus():')
print(get_exc_plus())
|
TylerTemp/docpie
|
docpie/element.py
|
Either.fix_argument_only
|
python
|
def fix_argument_only(self):
'''
fix_argument_only() -> Either or Unit(Argument)
`<arg> | ARG | <arg3>` ->
`Required(Argument('<arg>', 'ARG', '<arg3>'))`
`[<arg>] | [ARG] | [<arg3>]` ->
`Optional(Argument('<arg>', 'ARG', '<arg3>'))`
`(<arg>) | [ARG]` -> not change, return self
`-a | --better` -> not change
'''
# for idx, branch in enumerate(self):
# if isinstance(branch[0], Either):
# self[idx] = branch.fix()
first_type = type(self[0])
if first_type not in (Required, Optional):
return self
for branch in self:
if not (len(branch) == 1 and
isinstance(branch, first_type) and
isinstance(branch[0], Argument)):
logger.debug('fix %r not change', self)
return self
else:
first = self[0][0]
for each in self:
first.names.update(each[0].names)
result = first_type(first)
logger.debug('fix %r -> %r', self, result)
return result
|
fix_argument_only() -> Either or Unit(Argument)
`<arg> | ARG | <arg3>` ->
`Required(Argument('<arg>', 'ARG', '<arg3>'))`
`[<arg>] | [ARG] | [<arg3>]` ->
`Optional(Argument('<arg>', 'ARG', '<arg3>'))`
`(<arg>) | [ARG]` -> not change, return self
`-a | --better` -> not change
|
train
|
https://github.com/TylerTemp/docpie/blob/e658454b81b6c79a020d499f12ad73496392c09a/docpie/element.py#L1517-L1545
| null |
class Either(list):
error = None
def __init__(self, *branch):
assert(all(isinstance(x, Unit) for x in branch))
super(Either, self).__init__(branch)
self.matched_branch = -1
def fix(self):
if not self:
return None
result = []
for each in self:
fixed = each.fix()
if fixed is not None:
result.append(fixed)
if not result:
return None
self[:] = result
return self.fix_argument_only()
def fix_argument_only(self):
'''
fix_argument_only() -> Either or Unit(Argument)
`<arg> | ARG | <arg3>` ->
`Required(Argument('<arg>', 'ARG', '<arg3>'))`
`[<arg>] | [ARG] | [<arg3>]` ->
`Optional(Argument('<arg>', 'ARG', '<arg3>'))`
`(<arg>) | [ARG]` -> not change, return self
`-a | --better` -> not change
'''
# for idx, branch in enumerate(self):
# if isinstance(branch[0], Either):
# self[idx] = branch.fix()
first_type = type(self[0])
if first_type not in (Required, Optional):
return self
for branch in self:
if not (len(branch) == 1 and
isinstance(branch, first_type) and
isinstance(branch[0], Argument)):
logger.debug('fix %r not change', self)
return self
else:
first = self[0][0]
for each in self:
first.names.update(each[0].names)
result = first_type(first)
logger.debug('fix %r -> %r', self, result)
return result
def arg_range(self):
result = set()
for each in self:
result.update(each.arg_range())
return list(result)
def __repr__(self):
return 'Either(%s)' % (', '.join(repr(x) for x in self))
|
rosshamish/hexgrid
|
hexgrid.py
|
location
|
python
|
def location(hexgrid_type, coord):
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
|
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L72-L97
|
[
"def nearest_tile_to_node(node_coord):\n \"\"\"\n Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().\n Returns a tile identifier.\n\n :param node_coord: node coordinate to find an adjacent tile to, int\n :return: tile identifier of an adjacent tile, Tile.tile_id\n \"\"\"\n return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)\n",
"def tile_node_offset_to_direction(offset):\n \"\"\"\n Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.\n\n :param offset: node_coord - tile_coord, int\n :return: direction of the offset, str\n \"\"\"\n try:\n return _tile_node_offsets[offset]\n except KeyError:\n logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))\n return 'ZZ'\n",
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n",
"def nearest_tile_to_edge(edge_coord):\n \"\"\"\n Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().\n Returns a tile identifier.\n\n :param edge_coord: edge coordinate to find an adjacent tile to, int\n :return: tile identifier of an adjacent tile, Tile.tile_id\n \"\"\"\n return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)\n",
"def tile_edge_offset_to_direction(offset):\n \"\"\"\n Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.\n\n :param offset: edge_coord - tile_coord, int\n :return: direction of the offset, str\n \"\"\"\n try:\n return _tile_edge_offsets[offset]\n except KeyError:\n logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))\n return 'ZZ'\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
coastal_coords
|
python
|
def coastal_coords():
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
|
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L128-L144
|
[
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n",
"def tile_edge_offset_to_direction(offset):\n \"\"\"\n Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.\n\n :param offset: edge_coord - tile_coord, int\n :return: direction of the offset, str\n \"\"\"\n try:\n return _tile_edge_offsets[offset]\n except KeyError:\n logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))\n return 'ZZ'\n",
"def coastal_tile_ids():\n \"\"\"\n Returns a list of tile identifiers which lie on the border of the grid.\n \"\"\"\n return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))\n",
"def coastal_edges(tile_id):\n \"\"\"\n Returns a list of coastal edge coordinate.\n\n An edge is coastal if it is on the grid's border.\n :return: list(int)\n \"\"\"\n edges = list()\n tile_coord = tile_id_to_coord(tile_id)\n for edge_coord in edges_touching_tile(tile_id):\n dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)\n if tile_id_in_direction(tile_id, dirn) is None:\n edges.append(edge_coord)\n return edges\n",
"def tile_id_in_direction(from_tile_id, direction):\n \"\"\"\n Variant on direction_to_tile. Returns None if there's no tile there.\n\n :param from_tile_id: tile identifier, int\n :param direction: str\n :return: tile identifier, int or None\n \"\"\"\n coord_from = tile_id_to_coord(from_tile_id)\n for offset, dirn in _tile_tile_offsets.items():\n if dirn == direction:\n coord_to = coord_from + offset\n if coord_to in legal_tile_coords():\n return tile_id_from_coord(coord_to)\n return None\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
coastal_edges
|
python
|
def coastal_edges(tile_id):
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
|
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L147-L160
|
[
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n",
"def tile_edge_offset_to_direction(offset):\n \"\"\"\n Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.\n\n :param offset: edge_coord - tile_coord, int\n :return: direction of the offset, str\n \"\"\"\n try:\n return _tile_edge_offsets[offset]\n except KeyError:\n logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))\n return 'ZZ'\n",
"def tile_id_in_direction(from_tile_id, direction):\n \"\"\"\n Variant on direction_to_tile. Returns None if there's no tile there.\n\n :param from_tile_id: tile identifier, int\n :param direction: str\n :return: tile identifier, int or None\n \"\"\"\n coord_from = tile_id_to_coord(from_tile_id)\n for offset, dirn in _tile_tile_offsets.items():\n if dirn == direction:\n coord_to = coord_from + offset\n if coord_to in legal_tile_coords():\n return tile_id_from_coord(coord_to)\n return None\n",
"def edges_touching_tile(tile_id):\n \"\"\"\n Get a list of edge coordinates touching the given tile.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: list of edge coordinates touching the given tile, list(int)\n \"\"\"\n coord = tile_id_to_coord(tile_id)\n edges = []\n for offset in _tile_edge_offsets.keys():\n edges.append(coord + offset)\n # logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))\n return edges\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
tile_id_in_direction
|
python
|
def tile_id_in_direction(from_tile_id, direction):
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
|
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L163-L177
|
[
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n",
"def legal_tile_coords():\n \"\"\"\n Return all legal tile coordinates on the grid\n \"\"\"\n return set(_tile_id_to_coord.values())\n",
"def tile_id_from_coord(coord):\n \"\"\"\n Convert a tile coordinate to its corresponding tile identifier.\n\n :param coord: coordinate of the tile, int\n :return: tile identifier, Tile.tile_id\n \"\"\"\n for i, c in _tile_id_to_coord.items():\n if c == coord:\n return i\n raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
direction_to_tile
|
python
|
def direction_to_tile(from_tile_id, to_tile_id):
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
|
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L180-L197
|
[
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n",
"def tile_tile_offset_to_direction(offset):\n \"\"\"\n Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.\n\n :param offset: tile_coord - tile_coord, int\n :return: direction of the offset, str\n \"\"\"\n try:\n return _tile_tile_offsets[offset]\n except KeyError:\n logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))\n return 'ZZ'\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
edge_coord_in_direction
|
python
|
def edge_coord_in_direction(tile_id, direction):
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
|
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L242-L257
|
[
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n",
"def tile_edge_offset_to_direction(offset):\n \"\"\"\n Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.\n\n :param offset: edge_coord - tile_coord, int\n :return: direction of the offset, str\n \"\"\"\n try:\n return _tile_edge_offsets[offset]\n except KeyError:\n logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))\n return 'ZZ'\n",
"def edges_touching_tile(tile_id):\n \"\"\"\n Get a list of edge coordinates touching the given tile.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: list of edge coordinates touching the given tile, list(int)\n \"\"\"\n coord = tile_id_to_coord(tile_id)\n edges = []\n for offset in _tile_edge_offsets.keys():\n edges.append(coord + offset)\n # logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))\n return edges\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
node_coord_in_direction
|
python
|
def node_coord_in_direction(tile_id, direction):
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
|
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L260-L275
|
[
"def tile_node_offset_to_direction(offset):\n \"\"\"\n Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.\n\n :param offset: node_coord - tile_coord, int\n :return: direction of the offset, str\n \"\"\"\n try:\n return _tile_node_offsets[offset]\n except KeyError:\n logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))\n return 'ZZ'\n",
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n",
"def nodes_touching_tile(tile_id):\n \"\"\"\n Get a list of node coordinates touching the given tile.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: list of node coordinates touching the given tile, list(int)\n \"\"\"\n coord = tile_id_to_coord(tile_id)\n nodes = []\n for offset in _tile_node_offsets.keys():\n nodes.append(coord + offset)\n # logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))\n return nodes\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
    """
    Format a coordinate for display; the format depends on the coordinate type.

    Tiles look like: 1, 12
    Nodes look like: (1 NW), (12 S)
    Edges look like: (1 NW), (12 SE)

    :param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
    :param coord: integer coordinate in this module's hexadecimal coordinate system
    :return: formatted string for display, or None for an unknown type
    """
    if hexgrid_type == TILE:
        return str(coord)
    if hexgrid_type == NODE:
        anchor = nearest_tile_to_node(coord)
        offset_dir = tile_node_offset_to_direction(coord - tile_id_to_coord(anchor))
        return '({} {})'.format(anchor, offset_dir)
    if hexgrid_type == EDGE:
        anchor = nearest_tile_to_edge(coord)
        offset_dir = tile_edge_offset_to_direction(coord - tile_id_to_coord(anchor))
        return '({} {})'.format(anchor, offset_dir)
    logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
    return None
def from_location(hexgrid_type, tile_id, direction=None):
    """
    Convert a (type, tile id, direction) location into a grid coordinate.

    :param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
    :param tile_id: tile identifier, int
    :param direction: str
    :return: integer coordinate in this module's hexadecimal coordinate system,
             or None for an unknown type
    """
    if hexgrid_type == TILE:
        # A tile is fully identified by its id; a direction is meaningless here.
        if direction is not None:
            raise ValueError('tiles do not have a direction')
        return tile_id_to_coord(tile_id)
    if hexgrid_type == NODE:
        return node_coord_in_direction(tile_id, direction)
    if hexgrid_type == EDGE:
        return edge_coord_in_direction(tile_id, direction)
    logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
    return None
def coastal_tile_ids():
    """
    Return a list of tile identifiers which lie on the border of the grid.

    A tile is coastal when at least one of its edges is coastal.
    """
    return [tile_id for tile_id in legal_tile_ids() if coastal_edges(tile_id)]
def coastal_coords():
    """
    Return the grid's coastal locations as (tile id, direction) 2-tuples.

    An edge is coastal if it is on the grid's border, i.e. there is no tile
    on its far side.

    :return: list( (tile_id, direction) )
    """
    coast = []
    for tile_id in coastal_tile_ids():
        anchor = tile_id_to_coord(tile_id)
        directions = (tile_edge_offset_to_direction(edge - anchor)
                      for edge in coastal_edges(tile_id))
        for dirn in directions:
            # coastal_edges already filters to borders; re-check is harmless.
            if tile_id_in_direction(tile_id, dirn) is None:
                coast.append((tile_id, dirn))
    return coast
def coastal_edges(tile_id):
    """
    Return the coastal edge coordinates of the given tile.

    An edge is coastal if it is on the grid's border, i.e. no tile lies in
    the edge's direction from this tile.

    :param tile_id: tile identifier, int
    :return: list(int)
    """
    anchor = tile_id_to_coord(tile_id)
    return [edge for edge in edges_touching_tile(tile_id)
            if tile_id_in_direction(tile_id,
                                    tile_edge_offset_to_direction(edge - anchor)) is None]
def tile_id_in_direction(from_tile_id, direction):
    """
    Variant of direction_to_tile: look up the neighbouring tile in a direction.
    Returns None if there's no tile there (i.e. the offset leaves the grid).

    :param from_tile_id: tile identifier, int
    :param direction: str
    :return: tile identifier, int or None
    """
    origin = tile_id_to_coord(from_tile_id)
    for offset, offset_dir in _tile_tile_offsets.items():
        if offset_dir != direction:
            continue
        candidate = origin + offset
        if candidate in legal_tile_coords():
            return tile_id_from_coord(candidate)
    return None
def direction_to_tile(from_tile_id, to_tile_id):
    """
    Get the cardinal direction from one tile to an adjacent tile.

    Convenience wrapper around tile_tile_offset_to_direction; the two tiles
    must be adjacent.

    :param from_tile_id: tile identifier, int
    :param to_tile_id: tile identifier, int
    :return: direction from from_tile to to_tile, str
    """
    offset = tile_id_to_coord(to_tile_id) - tile_id_to_coord(from_tile_id)
    return tile_tile_offset_to_direction(offset)
def tile_tile_offset_to_direction(offset):
    """
    Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.

    Logs critically and returns the sentinel 'ZZ' for an unknown offset.

    :param offset: tile_coord - tile_coord, int
    :return: direction of the offset, str
    """
    if offset in _tile_tile_offsets:
        return _tile_tile_offsets[offset]
    logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
    return 'ZZ'
def tile_node_offset_to_direction(offset):
    """
    Get the cardinal direction of a tile-node offset. The tile and node must
    be adjacent.

    Logs critically and returns the sentinel 'ZZ' for an unknown offset.

    :param offset: node_coord - tile_coord, int
    :return: direction of the offset, str
    """
    if offset in _tile_node_offsets:
        return _tile_node_offsets[offset]
    logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
    return 'ZZ'
def tile_edge_offset_to_direction(offset):
    """
    Get the cardinal direction of a tile-edge offset. The tile and edge must
    be adjacent.

    Logs critically and returns the sentinel 'ZZ' for an unknown offset.

    :param offset: edge_coord - tile_coord, int
    :return: direction of the offset, str
    """
    if offset in _tile_edge_offsets:
        return _tile_edge_offsets[offset]
    logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
    return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
    """
    Return the edge coordinate in the given direction at the given tile.

    :param tile_id: tile identifier, int
    :param direction: direction, str
    :return: edge coord, int
    :raise ValueError: if the tile has no edge in that direction
    """
    anchor = tile_id_to_coord(tile_id)
    matches = (edge for edge in edges_touching_tile(tile_id)
               if tile_edge_offset_to_direction(edge - anchor) == direction)
    for edge in matches:
        return edge
    raise ValueError('No edge found in direction={} at tile_id={}'.format(
        direction,
        tile_id
    ))
def tile_id_to_coord(tile_id):
    """
    Convert a tile identifier to its corresponding grid coordinate.

    Logs critically and returns -1 for an unknown identifier.

    :param tile_id: tile identifier, Tile.tile_id
    :return: coordinate of the tile, int
    """
    if tile_id in _tile_id_to_coord:
        return _tile_id_to_coord[tile_id]
    logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
    return -1
def tile_id_from_coord(coord):
    """
    Convert a tile coordinate to its corresponding tile identifier.

    :param coord: coordinate of the tile, int
    :return: tile identifier, Tile.tile_id
    :raise Exception: if no tile has the given coordinate
    """
    for tile_id, tile_coord in _tile_id_to_coord.items():
        if tile_coord == coord:
            return tile_id
    raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
    """
    Find the first tile adjacent to the given edge, searching every legal tile.

    Convenience wrapper around nearest_tile_to_edge_using_tiles.

    :param edge_coord: edge coordinate to find an adjacent tile to, int
    :return: tile identifier of an adjacent tile, Tile.tile_id
    """
    candidates = legal_tile_ids()
    return nearest_tile_to_edge_using_tiles(candidates, edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
    """
    Get the first tile found adjacent to the given edge.

    Logs critically (and implicitly returns None) if no tile touches the edge.

    :param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
    :param edge_coord: edge coordinate to find an adjacent tile to, int
    :return: tile identifier of an adjacent tile, Tile.tile_id
    """
    for candidate in tile_ids:
        # Adjacent iff the edge's offset from the tile is a known edge offset.
        if edge_coord - tile_id_to_coord(candidate) in _tile_edge_offsets:
            return candidate
    logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
    """
    Find the first tile adjacent to the given node, searching every legal tile.

    Convenience wrapper around nearest_tile_to_node_using_tiles.

    :param node_coord: node coordinate to find an adjacent tile to, int
    :return: tile identifier of an adjacent tile, Tile.tile_id
    """
    candidates = legal_tile_ids()
    return nearest_tile_to_node_using_tiles(candidates, node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
    """
    Get the first tile found adjacent to the given node.

    Logs critically (and implicitly returns None) if no tile touches the node.

    :param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
    :param node_coord: node coordinate to find an adjacent tile to, int
    :return: tile identifier of an adjacent tile, Tile.tile_id
    """
    for candidate in tile_ids:
        # Adjacent iff the node's offset from the tile is a known node offset.
        if node_coord - tile_id_to_coord(candidate) in _tile_node_offsets:
            return candidate
    logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
    """
    Get a list of the six edge coordinates touching the given tile.

    :param tile_id: tile identifier, Tile.tile_id
    :return: list of edge coordinates touching the given tile, list(int)
    """
    base = tile_id_to_coord(tile_id)
    return [base + offset for offset in _tile_edge_offsets]
def nodes_touching_tile(tile_id):
    """
    Get a list of the six node coordinates touching the given tile.

    :param tile_id: tile identifier, Tile.tile_id
    :return: list of node coordinates touching the given tile, list(int)
    """
    base = tile_id_to_coord(tile_id)
    return [base + offset for offset in _tile_node_offsets]
def nodes_touching_edge(edge_coord):
    """
    Return the two node coordinates which lie on the given edge coordinate.

    :param edge_coord: edge coordinate, int
    :return: list of 2 node coordinates which are on the given edge, list(int)
    """
    first = hex_digit(edge_coord, 1)
    second = hex_digit(edge_coord, 2)
    both_even = first % 2 == 0 and second % 2 == 0
    if both_even:
        return [coord_from_hex_digits(first, second + 1),
                coord_from_hex_digits(first + 1, second)]
    return [coord_from_hex_digits(first, second),
            coord_from_hex_digits(first + 1, second + 1)]
def legal_edge_coords():
    """
    Return all legal edge coordinates on the grid as a set.
    """
    edges = {edge
             for tile_id in legal_tile_ids()
             for edge in edges_touching_tile(tile_id)}
    logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
    return edges
def legal_node_coords():
    """
    Return all legal node coordinates on the grid as a set.
    """
    nodes = {node
             for tile_id in legal_tile_ids()
             for node in nodes_touching_tile(tile_id)}
    logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
    return nodes
def legal_tile_ids():
    """
    Return all legal tile identifiers on the grid, in the range [1, 19] inclusive.
    """
    # Iterating the dict yields its keys, i.e. the tile identifiers.
    return set(_tile_id_to_coord)
def legal_tile_coords():
    """
    Return all legal tile coordinates on the grid
    """
    # The coordinates are the values of the id->coord map above; the map's
    # values are distinct, so this set has one entry per tile.
    return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
    """
    Return the first or second hex digit of the given two-digit coordinate.

    Uses integer arithmetic instead of slicing hex(coord)'s string form, which
    mis-handled coordinates below 0x10 (digit=1 returned the low digit and
    digit=2 raised IndexError). For two-digit coordinates — the only ones this
    module's grid produces (max 0xBB) — the result is unchanged.

    :param coord: hexadecimal coordinate, int
    :param digit: 1 or 2, meaning the first or second hex digit
    :return: int, the requested digit
    :raise ValueError: if digit is not 1 or 2
    """
    if digit not in [1, 2]:
        raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
            digit
        ))
    high, low = divmod(coord, 0x10)
    # `% 0x10` keeps only one digit; assumes coord fits in two hex digits.
    return high % 0x10 if digit == 1 else low
def coord_from_hex_digits(digit_1, digit_2):
    """
    Build the hexadecimal coordinate with the two given hex digits.

    >>> hex(coord_from_hex_digits(1, 3))
    '0x13'
    >>> hex(coord_from_hex_digits(1, 10))
    '0x1a'

    :param digit_1: first digit, int
    :param digit_2: second digit, int
    :return: hexadecimal coordinate, int
    """
    return digit_2 + 0x10 * digit_1
def rotate_direction(hexgrid_type, direction, ccw=True):
    """
    Rotate a direction string one tick around its hexgrid element.

    Tiles/edges use the six edge directions; nodes use the six node directions.

    :param hexgrid_type: hexgrid.TILE, hexgrid.EDGE or hexgrid.NODE
    :param direction: string, eg 'NW', 'N', 'SE'
    :param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
    :return: the rotated direction string, eg 'SW', 'NW', 'S'
    :raise ValueError: for an unknown hexgrid type or direction
    """
    if hexgrid_type in (TILE, EDGE):
        ring = ['NW', 'W', 'SW', 'SE', 'E', 'NE'] if ccw \
            else ['NW', 'NE', 'E', 'SE', 'SW', 'W']
    elif hexgrid_type == NODE:
        ring = ['N', 'NW', 'SW', 'S', 'SE', 'NE'] if ccw \
            else ['N', 'NE', 'SE', 'S', 'SW', 'NW']
    else:
        raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
    # Modular step replaces the original sentinel-terminated 7-element lists.
    return ring[(ring.index(direction) + 1) % len(ring)]
|
rosshamish/hexgrid
|
hexgrid.py
|
tile_id_from_coord
|
python
|
def tile_id_from_coord(coord):
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
|
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L293-L303
| null |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
nearest_tile_to_edge_using_tiles
|
python
|
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
|
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L317-L328
|
[
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
nearest_tile_to_node_using_tiles
|
python
|
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
|
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L342-L353
|
[
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
edges_touching_tile
|
python
|
def edges_touching_tile(tile_id):
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
|
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L356-L368
|
[
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def nodes_touching_tile(tile_id):
"""
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
rosshamish/hexgrid
|
hexgrid.py
|
nodes_touching_tile
|
python
|
def nodes_touching_tile(tile_id):
coord = tile_id_to_coord(tile_id)
nodes = []
for offset in _tile_node_offsets.keys():
nodes.append(coord + offset)
# logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes))
return nodes
|
Get a list of node coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of node coordinates touching the given tile, list(int)
|
train
|
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L371-L383
|
[
"def tile_id_to_coord(tile_id):\n \"\"\"\n Convert a tile identifier to its corresponding grid coordinate.\n\n :param tile_id: tile identifier, Tile.tile_id\n :return: coordinate of the tile, int\n \"\"\"\n try:\n return _tile_id_to_coord[tile_id]\n except KeyError:\n logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))\n return -1\n"
] |
"""
module hexgrid provides functions for working with a hexagonal settlers of catan grid.
This module implements the coordinate system described in Robert S. Thomas's PhD dissertation on
JSettlers2, Appendix A. See the project at https://github.com/jdmonin/JSettlers2 for details.
Grids have tiles, nodes, and edges. Tiles, nodes, and edges all have coordinates
on the grid. Tiles also have identifiers numbered counter-clockwise starting from
the north-west edge. There are 19 tiles.
Adjacent locations can be computed by adding an offset to the given location. These
offsets are defined as dictionaries named _<type1>_<type2>_offsets, mapping offset->direction.
This direction is a cardinal direction represented as a string.
The edge and node coordinate spaces share values. That is, the coordinate value is
not enough to uniquely identify a location on the grid. For that reason, it is recommended
to represent locations as a (CoordType, 0xCoord) pair, each of which is guaranteed
to be unique.
See individual methods for usage.
"""
import logging
__author__ = "Ross Anderson <ross.anderson@ualberta.ca>"
__version__ = "0.2.1"
EDGE = 0
NODE = 1
TILE = 2
_tile_id_to_coord = {
# 1-19 clockwise starting from Top-Left
1: 0x37, 12: 0x59, 11: 0x7B,
2: 0x35, 13: 0x57, 18: 0x79, 10: 0x9B,
3: 0x33, 14: 0x55, 19: 0x77, 17: 0x99, 9: 0xBB,
4: 0x53, 15: 0x75, 16: 0x97, 8: 0xB9,
5: 0x73, 6: 0x95, 7: 0xB7
}
_tile_tile_offsets = {
# tile_coord - tile_coord
-0x20: 'NW',
-0x22: 'W',
-0x02: 'SW',
+0x20: 'SE',
+0x22: 'E',
+0x02: 'NE',
}
_tile_node_offsets = {
# node_coord - tile_coord
+0x01: 'N',
-0x10: 'NW',
-0x01: 'SW',
+0x10: 'S',
+0x21: 'SE',
+0x12: 'NE',
}
_tile_edge_offsets = {
# edge_coord - tile_coord
-0x10: 'NW',
-0x11: 'W',
-0x01: 'SW',
+0x10: 'SE',
+0x11: 'E',
+0x01: 'NE',
}
def location(hexgrid_type, coord):
"""
Returns a formatted string representing the coordinate. The format depends on the
coordinate type.
Tiles look like: 1, 12
Nodes look like: (1 NW), (12 S)
Edges look like: (1 NW), (12 SE)
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param coord: integer coordinate in this module's hexadecimal coordinate system
:return: formatted string for display
"""
if hexgrid_type == TILE:
return str(coord)
elif hexgrid_type == NODE:
tile_id = nearest_tile_to_node(coord)
dirn = tile_node_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
elif hexgrid_type == EDGE:
tile_id = nearest_tile_to_edge(coord)
dirn = tile_edge_offset_to_direction(coord - tile_id_to_coord(tile_id))
return '({} {})'.format(tile_id, dirn)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def from_location(hexgrid_type, tile_id, direction=None):
"""
:param hexgrid_type: hexgrid.TILE, hexgrid.NODE, hexgrid.EDGE
:param tile_id: tile identifier, int
:param direction: str
:return: integer coordinate in this module's hexadecimal coordinate system
"""
if hexgrid_type == TILE:
if direction is not None:
raise ValueError('tiles do not have a direction')
return tile_id_to_coord(tile_id)
elif hexgrid_type == NODE:
return node_coord_in_direction(tile_id, direction)
elif hexgrid_type == EDGE:
return edge_coord_in_direction(tile_id, direction)
else:
logging.warning('unsupported hexgrid_type={}'.format(hexgrid_type))
return None
def coastal_tile_ids():
"""
Returns a list of tile identifiers which lie on the border of the grid.
"""
return list(filter(lambda tid: len(coastal_edges(tid)) > 0, legal_tile_ids()))
def coastal_coords():
"""
A coastal coord is a 2-tuple: (tile id, direction).
An edge is coastal if it is on the grid's border.
:return: list( (tile_id, direction) )
"""
coast = list()
for tile_id in coastal_tile_ids():
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in coastal_edges(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
coast.append((tile_id, dirn))
# logging.debug('coast={}'.format(coast))
return coast
def coastal_edges(tile_id):
"""
Returns a list of coastal edge coordinate.
An edge is coastal if it is on the grid's border.
:return: list(int)
"""
edges = list()
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
dirn = tile_edge_offset_to_direction(edge_coord - tile_coord)
if tile_id_in_direction(tile_id, dirn) is None:
edges.append(edge_coord)
return edges
def tile_id_in_direction(from_tile_id, direction):
"""
Variant on direction_to_tile. Returns None if there's no tile there.
:param from_tile_id: tile identifier, int
:param direction: str
:return: tile identifier, int or None
"""
coord_from = tile_id_to_coord(from_tile_id)
for offset, dirn in _tile_tile_offsets.items():
if dirn == direction:
coord_to = coord_from + offset
if coord_to in legal_tile_coords():
return tile_id_from_coord(coord_to)
return None
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction
def tile_tile_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-tile offset. The tiles must be adjacent.
:param offset: tile_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_tile_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-tile offset={:x}'.format(offset))
return 'ZZ'
def tile_node_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-node offset. The tile and node must be adjacent.
:param offset: node_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_node_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-node offset={:x}'.format(offset))
return 'ZZ'
def tile_edge_offset_to_direction(offset):
"""
Get the cardinal direction of a tile-edge offset. The tile and edge must be adjacent.
:param offset: edge_coord - tile_coord, int
:return: direction of the offset, str
"""
try:
return _tile_edge_offsets[offset]
except KeyError:
logging.critical('Attempted getting direction of non-existent tile-edge offset={:x}'.format(offset))
return 'ZZ'
def edge_coord_in_direction(tile_id, direction):
"""
Returns the edge coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: edge coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for edge_coord in edges_touching_tile(tile_id):
if tile_edge_offset_to_direction(edge_coord - tile_coord) == direction:
return edge_coord
raise ValueError('No edge found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def node_coord_in_direction(tile_id, direction):
"""
Returns the node coordinate in the given direction at the given tile identifier.
:param tile_id: tile identifier, int
:param direction: direction, str
:return: node coord, int
"""
tile_coord = tile_id_to_coord(tile_id)
for node_coord in nodes_touching_tile(tile_id):
if tile_node_offset_to_direction(node_coord - tile_coord) == direction:
return node_coord
raise ValueError('No node found in direction={} at tile_id={}'.format(
direction,
tile_id
))
def tile_id_to_coord(tile_id):
"""
Convert a tile identifier to its corresponding grid coordinate.
:param tile_id: tile identifier, Tile.tile_id
:return: coordinate of the tile, int
"""
try:
return _tile_id_to_coord[tile_id]
except KeyError:
logging.critical('Attempted conversion of non-existent tile_id={}'.format(tile_id))
return -1
def tile_id_from_coord(coord):
"""
Convert a tile coordinate to its corresponding tile identifier.
:param coord: coordinate of the tile, int
:return: tile identifier, Tile.tile_id
"""
for i, c in _tile_id_to_coord.items():
if c == coord:
return i
raise Exception('Tile id lookup failed, coord={} not found in map'.format(hex(coord)))
def nearest_tile_to_edge(edge_coord):
"""
Convenience method wrapping nearest_tile_to_edge_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_edge_using_tiles(legal_tile_ids(), edge_coord)
def nearest_tile_to_edge_using_tiles(tile_ids, edge_coord):
"""
Get the first tile found adjacent to the given edge. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param edge_coord: edge coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if edge_coord - tile_id_to_coord(tile_id) in _tile_edge_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching edge={}'.format(edge_coord))
def nearest_tile_to_node(node_coord):
"""
Convenience method wrapping nearest_tile_to_node_using_tiles. Looks at all tiles in legal_tile_ids().
Returns a tile identifier.
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
return nearest_tile_to_node_using_tiles(legal_tile_ids(), node_coord)
def nearest_tile_to_node_using_tiles(tile_ids, node_coord):
"""
Get the first tile found adjacent to the given node. Returns a tile identifier.
:param tile_ids: tiles to look at for adjacency, list(Tile.tile_id)
:param node_coord: node coordinate to find an adjacent tile to, int
:return: tile identifier of an adjacent tile, Tile.tile_id
"""
for tile_id in tile_ids:
if node_coord - tile_id_to_coord(tile_id) in _tile_node_offsets.keys():
return tile_id
logging.critical('Did not find a tile touching node={}'.format(node_coord))
def edges_touching_tile(tile_id):
"""
Get a list of edge coordinates touching the given tile.
:param tile_id: tile identifier, Tile.tile_id
:return: list of edge coordinates touching the given tile, list(int)
"""
coord = tile_id_to_coord(tile_id)
edges = []
for offset in _tile_edge_offsets.keys():
edges.append(coord + offset)
# logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges))
return edges
def nodes_touching_edge(edge_coord):
"""
Returns the two node coordinates which are on the given edge coordinate.
:return: list of 2 node coordinates which are on the given edge coordinate, list(int)
"""
a, b = hex_digit(edge_coord, 1), hex_digit(edge_coord, 2)
if a % 2 == 0 and b % 2 == 0:
return [coord_from_hex_digits(a, b + 1),
coord_from_hex_digits(a + 1, b)]
else:
return [coord_from_hex_digits(a, b),
coord_from_hex_digits(a + 1, b + 1)]
def legal_edge_coords():
"""
Return all legal edge coordinates on the grid.
"""
edges = set()
for tile_id in legal_tile_ids():
for edge in edges_touching_tile(tile_id):
edges.add(edge)
logging.debug('Legal edge coords({})={}'.format(len(edges), edges))
return edges
def legal_node_coords():
"""
Return all legal node coordinates on the grid
"""
nodes = set()
for tile_id in legal_tile_ids():
for node in nodes_touching_tile(tile_id):
nodes.add(node)
logging.debug('Legal node coords({})={}'.format(len(nodes), nodes))
return nodes
def legal_tile_ids():
"""
Return all legal tile identifiers on the grid. In the range [1,19] inclusive.
"""
return set(_tile_id_to_coord.keys())
def legal_tile_coords():
"""
Return all legal tile coordinates on the grid
"""
return set(_tile_id_to_coord.values())
def hex_digit(coord, digit=1):
"""
Returns either the first or second digit of the hexadecimal representation of the given coordinate.
:param coord: hexadecimal coordinate, int
:param digit: 1 or 2, meaning either the first or second digit of the hexadecimal
:return: int, either the first or second digit
"""
if digit not in [1,2]:
raise ValueError('hex_digit can only get the first or second digit of a hex number, was passed digit={}'.format(
digit
))
return int(hex(coord)[1+digit], 16)
def coord_from_hex_digits(digit_1, digit_2):
"""
Returns an integer representing the hexadecimal coordinate with the two given hexadecimal digits.
>> hex(coord_from_hex_digits(1, 3))
'0x13'
>> hex(coord_from_hex_digits(1, 10))
'0x1A'
:param digit_1: first digit, int
:param digit_2: second digit, int
:return: hexadecimal coordinate, int
"""
return digit_1*16 + digit_2
def rotate_direction(hexgrid_type, direction, ccw=True):
"""
Takes a direction string associated with a type of hexgrid element, and rotates it one tick in the given direction.
:param direction: string, eg 'NW', 'N', 'SE'
:param ccw: if True, rotates counter clockwise. Otherwise, rotates clockwise.
:return: the rotated direction string, eg 'SW', 'NW', 'S'
"""
if hexgrid_type in [TILE, EDGE]:
directions = ['NW', 'W', 'SW', 'SE', 'E', 'NE', 'NW'] if ccw \
else ['NW', 'NE', 'E', 'SE', 'SW', 'W', 'NW']
return directions[directions.index(direction) + 1]
elif hexgrid_type in [NODE]:
directions = ['N', 'NW', 'SW', 'S', 'SE', 'NE', 'N'] if ccw \
else ['N', 'NE', 'SE', 'S', 'SW', 'NW', 'N']
return directions[directions.index(direction) + 1]
else:
raise ValueError('Invalid hexgrid type={} passed to rotate direction'.format(hexgrid_type))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.