func_code_string stringlengths 52 1.94M | func_documentation_string stringlengths 1 47.2k |
|---|---|
def request(endpoint, verb='GET', session_options=None, **options):
req = functools.partial(_request, endpoint, verb, session_options,
**options)
return _run_in_fresh_loop(req) | Performs a synchronous request.
Uses a dedicated event loop and aiohttp.ClientSession object.
Options:
- endpoint: the endpoint to call
- verb: the HTTP verb to use (defaults: GET)
- session_options: a dict containing options to initialize the session
(defaults: None)
- options: extra o... |
def json_request(endpoint, verb='GET', session_options=None, **options):
req = functools.partial(_request, endpoint, verb, session_options,
json=True, **options)
return _run_in_fresh_loop(req) | Like :func:`molotov.request` but extracts json from the response. |
def get_var(name, factory=None):
if name not in _VARS and factory is not None:
_VARS[name] = factory()
return _VARS.get(name) | Gets a global variable given its name.
If factory is not None and the variable is not set, factory
is a callable that will set the variable.
If not set, returns None. |
async def step(self, step_id, session, scenario=None):
if scenario is None:
scenario = pick_scenario(self.wid, step_id)
try:
await self.send_event('scenario_start', scenario=scenario)
await scenario['func'](session, *scenario['args'],
... | single scenario call.
When it returns 1, it works. -1 the script failed,
0 the test is stopping or needs to stop. |
def main():
parser = argparse.ArgumentParser(description='Github-based load test')
parser.add_argument('--version', action='store_true', default=False,
help='Displays version and exits.')
parser.add_argument('--virtualenv', type=str, default='virtualenv',
... | Moloslave clones a git repo and runs a molotov test |
def remove_directory(directory, show_warnings=True):
errors = []
def onerror(function, path, excinfo):
if show_warnings:
print 'Cannot delete %s: %s' % (os.path.relpath(directory), excinfo[1])
errors.append((function, path, excinfo))
if os.path.exists(directory):
if ... | Deletes a directory and its contents.
Returns a list of errors in form (function, path, excinfo). |
def copy_files(source_files, target_directory, source_directory=None):
try:
os.makedirs(target_directory)
except: # TODO: specific exception?
pass
for f in source_files:
source = os.path.join(source_directory, f) if source_directory else f
target = os.path.join(targe... | Copies a list of files to the specified directory.
If source_directory is provided, it will be prepended to each source file. |
def yes_or_no(message):
while True:
print message, '(yes/no)',
line = raw_input()
if line is None:
return None
line = line.lower()
if line == 'y' or line == 'ye' or line == 'yes':
return True
if line == 'n' or line == 'no':
ret... | Gets user input and returns True for yes and False for no. |
def list_plugins(directory=None):
repo = require_repo(directory)
plugins = get_value(repo, 'plugins')
if not plugins or not isinstance(plugins, dict):
return None
return plugins.keys() | Gets a list of the installed themes. |
def add_plugin(plugin, directory=None):
repo = require_repo(directory)
plugins = get_value(repo, 'plugins', expect_type=dict)
if plugin in plugins:
return False
plugins[plugin] = {}
set_value(repo, 'plugins', plugins)
return True | Adds the specified plugin. This returns False if it was already added. |
def get_plugin_settings(plugin, directory=None):
repo = require_repo(directory)
plugins = get_value(repo, 'plugins')
return plugins.get(plugin) if isinstance(plugins, dict) else None | Gets the settings for the specified plugin. |
def preview(directory=None, host=None, port=None, watch=True):
directory = directory or '.'
host = host or '127.0.0.1'
port = port or 5000
# TODO: admin interface
# TODO: use cache_only to keep from modifying output directly
out_directory = build(directory)
# Serve generated site
os... | Runs a local server to preview the working directory of a repository. |
def require_repo(directory=None):
if directory and not os.path.isdir(directory):
raise ValueError('Directory not found: ' + repr(directory))
repo = repo_path(directory)
if not os.path.isdir(repo):
raise RepositoryNotFoundError(directory)
return repo | Checks for a presentation repository and raises an exception if not found. |
def init(directory=None):
repo = repo_path(directory)
if os.path.isdir(repo):
raise RepositoryAlreadyExistsError(directory, repo)
# Initialize repository with default template
shutil.copytree(default_template_path, repo)
message = '"Default presentation content."'
subprocess.call(['... | Initializes a Gitpress presentation repository at the specified directory. |
def iterate_presentation_files(path=None, excludes=None, includes=None):
# Defaults
if includes is None:
includes = []
if excludes is None:
excludes = []
# Transform glob patterns to regular expressions
includes_pattern = r'|'.join([fnmatch.translate(x) for x in includes]) or r'... | Iterates the repository presentation files relative to 'path',
not including themes. Note that 'includes' take priority. |
def read_config_file(path):
try:
with open(path, 'r') as f:
return json.load(f, object_pairs_hook=OrderedDict)
except IOError as ex:
if ex != errno.ENOENT:
raise
return {} | Returns the configuration from the specified file. |
def write_config(repo_directory, config):
return write_config_file(os.path.join(repo_directory, config_file), config) | Writes the specified configuration to the presentation repository. |
def write_config_file(path, config):
contents = json.dumps(config, indent=4, separators=(',', ': ')) + '\n'
try:
with open(path, 'w') as f:
f.write(contents)
return True
except IOError as ex:
if ex != errno.ENOENT:
raise
return False | Writes the specified configuration to the specified file. |
def get_value(repo_directory, key, expect_type=None):
config = read_config(repo_directory)
value = config.get(key)
if expect_type and value is not None and not isinstance(value, expect_type):
raise ConfigSchemaError('Expected config variable %s to be type %s, got %s'
% (repr(key), r... | Gets the value of the specified key in the config file. |
def set_value(repo_directory, key, value, strict=True):
if value is None:
raise ValueError('Argument "value" must not be None.')
# Read values and do nothing if not making any changes
config = read_config(repo_directory)
old = config.get(key)
if old == value:
return old
# Ch... | Sets the value of a particular key in the config file. This has no effect when setting to the same value. |
def build(content_directory=None, out_directory=None):
content_directory = content_directory or '.'
out_directory = os.path.abspath(out_directory or default_out_directory)
repo = require_repo(content_directory)
# Prevent user mistakes
if out_directory == '.':
raise ValueError('Output di... | Builds the site from its content and presentation repository. |
def main(argv=None):
if argv is None:
argv = sys.argv[1:]
usage = '\n\n\n'.join(__doc__.split('\n\n\n')[1:])
version = 'Gitpress ' + __version__
# Parse options
args = docopt(usage, argv=argv, version=version)
# Execute command
try:
return execute(args)
except Reposi... | The entry point of the application. |
def execute(args):
def info(*message):
if not args['-q']:
print ' '.join(map(str, message))
if args['init']:
try:
repo = init(args['<directory>'])
info('Initialized Gitpress repository in', repo)
except RepositoryAlreadyExistsError as ex:... | Executes the command indicated by the specified parsed arguments. |
def gpp(argv=None):
if argv is None:
argv = sys.argv[1:]
argv.insert(0, 'preview')
return main(argv) | Shortcut function for running the previewing command. |
def list_themes(directory=None):
repo = require_repo(directory)
path = os.path.join(repo, themes_dir)
return os.listdir(path) if os.path.isdir(path) else None | Gets a list of the installed themes. |
def use_theme(theme, directory=None):
repo = require_repo(directory)
if theme not in list_themes(directory):
raise ThemeNotFoundError(theme)
old_theme = set_value(repo, 'theme', theme)
return old_theme != theme | Switches to the specified theme. This returns False if switching to the already active theme. |
def fill_opacity(value):
if value.value:
_assert_is_type('fill_opacity.value', value.value,
(float, int))
if value.value < 0 or value.value > 1:
raise ValueError(
'fill_opacity must be between 0 and 1') | ValueRef : int or float, opacity of the fill (0 to 1) |
def stroke_width(value):
if value.value:
_assert_is_type('stroke_width.value', value.value, int)
if value.value < 0:
raise ValueError('stroke width cannot be negative') | ValueRef : int, width of the stroke in pixels |
def stroke_opacity(value):
if value.value:
_assert_is_type('stroke_opacity.value', value.value,
(float, int))
if value.value < 0 or value.value > 1:
raise ValueError(
'stroke_opacity must be between 0 and 1') | ValueRef : number, opacity of the stroke (0 to 1) |
def size(value):
if value.value:
_assert_is_type('size.value', value.value, int)
if value.value < 0:
raise ValueError('size cannot be negative') | ValueRef : number, area of the mark in pixels
This is the total area of a symbol. For example, a value of 500 and
a ``shape`` of ``'circle'`` would result in circles with an area of
500 square pixels. Only used if ``type`` is ``'symbol'``. |
def shape(value):
if value.value:
_assert_is_type('shape.value', value.value, str_types)
if value.value not in PropertySet._valid_shapes:
raise ValueError(value.value + ' is not a valid shape') | ValueRef : string, type of symbol to use
Possible values are ``'circle'`` (default), ``'square'``,
``'cross'``, ``'diamond'``, ``'triangle-up'``, and
``'triangle-down'``. Only used if ``type`` is ``'symbol'``. |
def interpolate(value):
if value.value:
_assert_is_type('shape.value', value.value, str_types)
if value.value not in PropertySet._valid_methods:
raise ValueError(value.value + ' is not a valid method') | ValueRef : string, line interpolation method to use
Possible values for ``area`` types are `'linear'`,
``'step-before'``, ``'step-after'``, ``'basis'``, ``'basis-open'``,
``'cardinal'``, ``'cardinal-open'``, ``'monotone'``. ``line`` types
have all values for ``area`` as well as ``'basis... |
def align(value):
if value.value:
_assert_is_type('shape.value', value.value, str_types)
if value.value not in PropertySet._valid_align:
raise ValueError(value.value + ' is not a valid alignment') | ValueRef : string, horizontal alignment of mark
Possible values are ``'left'``, ``'right'``, and ``'center'``. Only
used if ``type`` is ``'image'`` or ``'text'``. |
def baseline(value):
if value.value:
_assert_is_type('shape.value', value.value, str_types)
if value.value not in PropertySet._valid_baseline:
raise ValueError(value.value + ' is not a valid baseline') | ValueRef : string, vertical alignment of mark
Possible values are ``'top'``, ``'middle'``, and ``'bottom'``. Only
used if ``type`` is ``'image'`` or ``'text'``. |
def type(value):
valid_transforms = frozenset([
'array', 'copy', 'cross', 'facet', 'filter',
'flatten', 'fold', 'formula', 'slice', 'sort', 'stats',
'truncate', 'unique', 'window', 'zip', 'force', 'geo', 'geopath',
'link', 'pie', 'stack', 'treemap', 'word... | string: property name in which to store the computed transform
value.
The valid transform types are as follows:
'array', 'copy', 'cross', 'facet', 'filter', 'flatten', 'fold',
'formula', 'slice', 'sort', 'stats', 'truncate', 'unique', 'window',
'zip', 'force', 'geo', 'geopath', ... |
def data_type(data, grouped=False, columns=None, key_on='idx', iter_idx=None):
if iter_idx:
return Data.from_mult_iters(idx=iter_idx, **data)
if pd:
if isinstance(data, (pd.Series, pd.DataFrame)):
return Data.from_pandas(data, grouped=grouped, columns=columns,
... | Data type check for automatic import |
def rebind(self, column=None, brew='GnBu'):
self.data['table'] = Data.keypairs(
self.raw_data, columns=[self.data_key, column])
domain = [Data.serialize(self.raw_data[column].min()),
Data.serialize(self.raw_data[column].quantile(0.95))]
scale = Scale(name='... | Bind a new column to the data map
Parameters
----------
column: str, default None
Pandas DataFrame column name
brew: str, default None
Color brewer abbreviation. See colors.py |
def viewport(value):
if len(value) != 2:
raise ValueError('viewport must have 2 dimensions')
for v in value:
_assert_is_type('viewport dimension', v, int)
if v < 0:
raise ValueError('viewport dimensions cannot be negative') | 2-element list of ints : Dimensions of the viewport
The viewport is a bounding box containing the visualization. If the
dimensions of the visualization are larger than the viewport, then
the visualization will be scrollable.
If undefined, then the full visualization is shown. |
def padding(value):
if isinstance(value, dict):
required_keys = ['top', 'left', 'right', 'bottom']
for key in required_keys:
if key not in value:
error = ('Padding must have keys "{0}".'
.format('", "'.join(require... | int or dict : Padding around visualization
The padding defines the distance between the edge of the
visualization canvas to the visualization box. It does not count as
part of the visualization width/height. Values cannot be negative.
If a dict, padding must have all keys ``''top'``, `... |
def data(value):
for i, entry in enumerate(value):
_assert_is_type('data[{0}]'.format(i), entry, Data) | list or KeyedList of ``Data`` : Data definitions
This defines the data being visualized. See the :class:`Data` class
for details. |
def scales(value):
for i, entry in enumerate(value):
_assert_is_type('scales[{0}]'.format(i), entry, Scale) | list or KeyedList of ``Scale`` : Scale definitions
Scales map the data from the domain of the data to some
visualization space (such as an x-axis). See the :class:`Scale`
class for details. |
def axes(value):
for i, entry in enumerate(value):
_assert_is_type('axes[{0}]'.format(i), entry, Axis) | list or KeyedList of ``Axis`` : Axis definitions
Axes define the locations of the data being mapped by the scales.
See the :class:`Axis` class for details. |
def marks(value):
for i, entry in enumerate(value):
_assert_is_type('marks[{0}]'.format(i), entry, Mark) | list or KeyedList of ``Mark`` : Mark definitions
Marks are the visual objects (such as lines, bars, etc.) that
represent the data in the visualization space. See the :class:`Mark`
class for details. |
def legends(value):
for i, entry in enumerate(value):
_assert_is_type('legends[{0}]'.format(i), entry, Legend) | list or KeyedList of ``Legends`` : Legend definitions
Legends visualize scales, and take one or more scales as their input.
They can be customized via a LegendProperty object. |
def axis_titles(self, x=None, y=None):
keys = self.axes.get_keys()
if keys:
for key in keys:
if key == 'x':
self.axes[key].title = x
elif key == 'y':
self.axes[key].title = y
else:
self.axes.... | Apply axis titles to the figure.
This is a convenience method for manually modifying the "Axes" mark.
Parameters
----------
x: string, default 'null'
X-axis title
y: string, default 'null'
Y-axis title
Example
-------
>>>vis.axis... |
def _set_axis_properties(self, axis):
if not getattr(axis, 'properties'):
axis.properties = AxisProperties()
for prop in ['ticks', 'axis', 'major_ticks', 'minor_ticks',
'title', 'labels']:
setattr(axis.properties, prop, PropertySet()) | Set AxisProperties and PropertySets |
def _set_all_axis_color(self, axis, color):
for prop in ['ticks', 'axis', 'major_ticks', 'minor_ticks', 'title',
'labels']:
prop_set = getattr(axis.properties, prop)
if color and prop in ['title', 'labels']:
prop_set.fill = ValueRef(value=col... | Set axis ticks, title, labels to given color |
def _axis_properties(self, axis, title_size, title_offset, label_angle,
label_align, color):
if self.axes:
axis = [a for a in self.axes if a.scale == axis][0]
self._set_axis_properties(axis)
self._set_all_axis_color(axis, color)
i... | Assign axis properties |
def common_axis_properties(self, color=None, title_size=None):
if self.axes:
for axis in self.axes:
self._set_axis_properties(axis)
self._set_all_axis_color(axis, color)
if title_size:
ref = ValueRef(value=title_size)
... | Set common axis properties such as color
Parameters
----------
color: str, default None
Hex color str, etc |
def x_axis_properties(self, title_size=None, title_offset=None,
label_angle=None, label_align=None, color=None):
self._axis_properties('x', title_size, title_offset, label_angle,
label_align, color)
return self | Change x-axis title font size and label angle
Parameters
----------
title_size: int, default None
Title size, in px
title_offset: int, default None
Pixel offset from given axis
label_angle: int, default None
label angle in degrees
labe... |
def y_axis_properties(self, title_size=None, title_offset=None,
label_angle=None, label_align=None, color=None):
self._axis_properties('y', title_size, title_offset, label_angle,
label_align, color)
return self | Change y-axis title font size and label angle
Parameters
----------
title_size: int, default None
Title size, in px
title_offset: int, default None
Pixel offset from given axis
label_angle: int, default None
label angle in degrees
labe... |
def legend(self, title=None, scale='color', text_color=None):
self.legends.append(Legend(title=title, fill=scale, offset=0,
properties=LegendProperties()))
if text_color:
color_props = PropertySet(fill=ValueRef(value=text_color))
self.l... | Convience method for adding a legend to the figure.
Important: This defaults to the color scale that is generated with
Line, Area, Stacked Line, etc charts. For bar charts, the scale ref is
usually 'y'.
Parameters
----------
title: string, default None
Legen... |
def colors(self, brew=None, range_=None):
if brew:
self.scales['color'].range = brews[brew]
elif range_:
self.scales['color'].range = range_
return self | Convenience method for adding color brewer scales to charts with a
color scale, such as stacked or grouped bars.
See the colors here: http://colorbrewer2.org/
Or here: http://bl.ocks.org/mbostock/5577023
This assumes that a 'color' scale exists on your chart.
Parameters
... |
def validate(self, require_all=True, scale='colors'):
super(self.__class__, self).validate()
required_attribs = ('data', 'scales', 'axes', 'marks')
for elem in required_attribs:
attr = getattr(self, elem)
if attr:
# Validate each element of the se... | Validate the visualization contents.
Parameters
----------
require_all : boolean, default True
If True (default), then all fields ``data``, ``scales``,
``axes``, and ``marks`` must be defined. The user is allowed to
disable this if the intent is to define the... |
def _repr_html_(self):
vis_id = str(uuid4()).replace("-", "")
html = % (vis_id, self.to_json(pretty_print=False), vis_id)
return html | Build the HTML representation for IPython. |
def display(self):
from IPython.core.display import display, HTML
display(HTML(self._repr_html_())) | Display the visualization inline in the IPython notebook.
This is deprecated, use the following instead::
from IPython.display import display
display(viz) |
def validate(self, *args):
super(self.__class__, self).validate(*args)
if not self.name:
raise ValidationError('name is required for Data') | Validate contents of class |
def serialize(obj):
if isinstance(obj, str_types):
return obj
elif hasattr(obj, 'timetuple'):
return int(time.mktime(obj.timetuple())) * 1000
elif hasattr(obj, 'item'):
return obj.item()
elif hasattr(obj, '__float__'):
if isinstanc... | Convert an object into a JSON-serializable value
This is used by the ``from_pandas`` and ``from_numpy`` functions to
convert data to JSON-serializable types when loading. |
def from_pandas(cls, data, columns=None, key_on='idx', name=None,
series_key='data', grouped=False, records=False, **kwargs):
# Note: There's an experimental JSON encoder floating around in
# pandas land that hasn't made it into the main branch. This
# function shoul... | Load values from a pandas ``Series`` or ``DataFrame`` object
Parameters
----------
data : pandas ``Series`` or ``DataFrame``
Pandas object to import data from.
columns: list, default None
DataFrame columns to convert to Data. Keys default to col names.
... |
def from_numpy(cls, np_obj, name, columns, index=None, index_key=None,
**kwargs):
if not np:
raise LoadError('numpy could not be imported')
_assert_is_type('numpy object', np_obj, np.ndarray)
# Integer index if none is provided
index = index or ran... | Load values from a numpy array
Parameters
----------
np_obj : numpy.ndarray
numpy array to load data from
name : string
``name`` field for the data
columns : iterable
Sequence of column names, from left to right. Must have same
len... |
def from_mult_iters(cls, name=None, idx=None, **kwargs):
if not name:
name = 'table'
lengths = [len(v) for v in kwargs.values()]
if len(set(lengths)) != 1:
raise ValueError('Iterables must all be same length')
if not idx:
raise ValueError('Mus... | Load values from multiple iters
Parameters
----------
name : string, default None
Name of the data set. If None (default), the name will be set to
``'table'``.
idx: string, default None
Iterable to use for the data index
**kwargs : dict of ite... |
def from_iter(cls, data, name=None):
if not name:
name = 'table'
if isinstance(data, (list, tuple)):
data = {x: y for x, y in enumerate(data)}
values = [{'idx': k, 'col': 'data', 'val': v}
for k, v in sorted(data.items())]
return cls(nam... | Convenience method for loading data from an iterable.
Defaults to numerical indexing for x-axis.
Parameters
----------
data: iterable
An iterable of data (list, tuple, dict of key/val pairs)
name: string, default None
Name of the data set. If None (defau... |
def keypairs(cls, data, columns=None, use_index=False, name=None):
if not name:
name = 'table'
cls.raw_data = data
# Tuples
if isinstance(data, tuple):
values = [{"x": x[0], "y": x[1]} for x in data]
# Lists
elif isinstance(data, list):
... | This will format the data as Key: Value pairs, rather than the
idx/col/val style. This is useful for some transforms, and to
key choropleth map data
Standard Data Types:
List: [0, 10, 20, 30, 40]
Paired Tuples: ((0, 1), (0, 2), (0, 3))
Dict: {'A': 10, 'B': 20... |
def _numpy_to_values(data):
def to_list_no_index(xvals, yvals):
return [{"x": x, "y": np.asscalar(y)}
for x, y in zip(xvals, yvals)]
if len(data.shape) == 1 or data.shape[1] == 1:
xvals = range(data.shape[0] + 1)
values = to_list_no_index(... | Convert a NumPy array to values attribute |
def to_json(self, validate=False, pretty_print=True, data_path=None):
# TODO: support writing to separate file
return super(self.__class__, self).to_json(validate=validate,
pretty_print=pretty_print) | Convert data to JSON
Parameters
----------
data_path : string
If not None, then data is written to a separate file at the
specified path. Note that the ``url`` attribute if the data must
be set independently for the data to load correctly.
Returns
... |
def initialize_notebook():
try:
from IPython.core.display import display, HTML
except ImportError:
print("IPython Notebook could not be loaded.")
# Thanks to @jakevdp:
# https://github.com/jakevdp/mpld3/blob/master/mpld3/_display.py#L85
load_lib =
lib_urls = [
"'//c... | Initialize the IPython notebook display elements |
def _assert_is_type(name, value, value_type):
if not isinstance(value, value_type):
if type(value_type) is tuple:
types = ', '.join(t.__name__ for t in value_type)
raise ValueError('{0} must be one of ({1})'.format(name, types))
else:
raise ValueError('{0} mu... | Assert that a value must be a given type. |
def grammar(grammar_type=None, grammar_name=None):
def grammar_creator(validator, name):
def setter(self, value):
if isinstance(grammar_type, (type, tuple)):
_assert_is_type(validator.__name__, value, grammar_type)
validator(value)
self.grammar[name] ... | Decorator to define properties that map to the ``grammar``
dict. This dict is the canonical representation of the Vega grammar
within Vincent.
This decorator is intended for classes that map to some pre-defined JSON
structure, such as axes, data, marks, scales, etc. It is assumed that this
decorate... |
def validate(self):
for key, val in self.grammar.items():
try:
setattr(self, key, val)
except ValueError as e:
raise ValidationError('invalid contents: ' + e.args[0]) | Validate the contents of the object.
This calls ``setattr`` for each of the class's grammar properties. It
will catch ``ValueError``s raised by the grammar property's setters
and re-raise them as :class:`ValidationError`. |
def to_json(self, path=None, html_out=False,
html_path='vega_template.html', validate=False,
pretty_print=True):
if validate:
self.validate()
if pretty_print:
dumps_args = {'indent': 2, 'separators': (',', ': ')}
else:
... | Convert object to JSON
Parameters
----------
path: string, default None
Path to write JSON out. If there is no path provided, JSON
will be returned as a string to the console.
html_out: boolean, default False
If True, vincent will output an simple HTM... |
def useful_mimetype(text):
if text is None:
return False
mimetype = normalize_mimetype(text)
return mimetype not in [DEFAULT, PLAIN, None] | Check to see if the given mime type is a MIME type
which is useful in terms of how to treat this file. |
def normalize_extension(extension):
extension = decode_path(extension)
if extension is None:
return
if extension.startswith('.'):
extension = extension[1:]
if '.' in extension:
_, extension = os.path.splitext(extension)
extension = slugify(extension, sep='')
if exten... | Normalise a file name extension. |
def fetch(url: str, **kwargs) -> Selector:
kwargs.setdefault('headers', DEFAULT_HEADERS)
try:
res = requests.get(url, **kwargs)
res.raise_for_status()
except requests.RequestException as e:
print(e)
else:
html = res.text
tree = Selector(text=html)
ret... | Send HTTP request and parse it as a DOM tree.
Args:
url (str): The url of the site.
Returns:
Selector: allows you to select parts of HTML text using CSS or XPath expressions. |
async def async_fetch(url: str, **kwargs) -> Selector:
kwargs.setdefault('headers', DEFAULT_HEADERS)
async with aiohttp.ClientSession(**kwargs) as ses:
async with ses.get(url, **kwargs) as res:
html = await res.text()
tree = Selector(text=html)
return tree | Do the fetch in an async style.
Args:
url (str): The url of the site.
Returns:
Selector: allows you to select parts of HTML text using CSS or XPath expressions. |
def view(url: str, **kwargs) -> bool:
kwargs.setdefault('headers', DEFAULT_HEADERS)
html = requests.get(url, **kwargs).content
if b'<base' not in html:
repl = f'<head><base href="{url}">'
html = html.replace(b'<head>', repl.encode('utf-8'))
fd, fname = tempfile.mkstemp('.html')
... | View the page whether rendered properly. (ensure the <base> tag to make external links work)
Args:
url (str): The url of the site. |
def links(res: requests.models.Response,
search: str = None,
pattern: str = None) -> list:
hrefs = [link.to_text() for link in find_all_links(res.text)]
if search:
hrefs = [href for href in hrefs if search in href]
if pattern:
hrefs = [href for href in hrefs if re.fi... | Get the links of the page.
Args:
res (requests.models.Response): The response of the page.
search (str, optional): Defaults to None. Search the links you want.
pattern (str, optional): Defaults to None. Search the links use a regex pattern.
Returns:
list: All the links of the p... |
def save_as_json(total: list,
name='data.json',
sort_by: str = None,
no_duplicate=False,
order='asc'):
if sort_by:
reverse = order == 'desc'
total = sorted(total, key=itemgetter(sort_by), reverse=reverse)
if no_duplicate:
... | Save what you crawled as a json file.
Args:
total (list): Total of data you crawled.
name (str, optional): Defaults to 'data.json'. The name of the file.
sort_by (str, optional): Defaults to None. Sort items by a specific key.
no_duplicate (bool, optional): Defaults to False. If Tru... |
def cli():
argv = docopt(__doc__, version=VERSION)
if argv['genspider']:
name = f"{argv['<name>']}.py"
use_async = argv['--async']
template = 'data_async.tmpl' if use_async else 'data.tmpl'
package_dir = Path(__file__).parent
template_text = package_dir.joinpath('tem... | Commandline for looter :d |
def get_value_tuple(self):
retval = tuple()
for val in self.VALUES:
retval += (getattr(self, val),)
return retval | Returns a tuple of the color's values (in order). For example,
an LabColor object will return (lab_l, lab_a, lab_b), where each
member of the tuple is the float value for said variable. |
def set_observer(self, observer):
observer = str(observer)
if observer not in color_constants.OBSERVERS:
raise InvalidObserverError(self)
self.observer = observer | Validates and sets the color's observer angle.
.. note:: This only changes the observer angle value. It does no conversion
of the color's coordinates.
:param str observer: One of '2' or '10'. |
def set_illuminant(self, illuminant):
illuminant = illuminant.lower()
if illuminant not in color_constants.ILLUMINANTS[self.observer]:
raise InvalidIlluminantError(illuminant)
self.illuminant = illuminant | Validates and sets the color's illuminant.
.. note:: This only changes the illuminant. It does no conversion
of the color's coordinates. For this, you'll want to refer to
:py:meth:`XYZColor.apply_adaptation <colormath.color_objects.XYZColor.apply_adaptation>`.
.. tip:: Call thi... |
def get_illuminant_xyz(self, observer=None, illuminant=None):
try:
if observer is None:
observer = self.observer
illums_observer = color_constants.ILLUMINANTS[observer]
except KeyError:
raise InvalidObserverError(self)
try:
... | :param str observer: Get the XYZ values for another observer angle. Must
be either '2' or '10'.
:param str illuminant: Get the XYZ values for another illuminant.
:returns: the color's illuminant's XYZ values. |
def get_numpy_array(self):
# This holds the obect's spectral data, and will be passed to
# numpy.array() to create a numpy array (matrix) for the matrix math
# that will be done during the conversion to XYZ.
values = []
# Use the required value list to build this dynamic... | Dump this color into NumPy array. |
def calc_density(self, density_standard=None):
if density_standard is not None:
return density.ansi_density(self, density_standard)
else:
return density.auto_density(self) | Calculates the density of the SpectralColor. By default, Status T
density is used, and the correct density distribution (Red, Green,
or Blue) is chosen by comparing the Red, Green, and Blue components of
the spectral sample (the values being red in via "filters"). |
def apply_adaptation(self, target_illuminant, adaptation='bradford'):
logger.debug(" \- Original illuminant: %s", self.illuminant)
logger.debug(" \- Target illuminant: %s", target_illuminant)
# If the XYZ values were taken with a different reference white than the
# native ref... | This applies an adaptation matrix to change the XYZ color's illuminant.
You'll most likely only need this during RGB conversions. |
def _clamp_rgb_coordinate(self, coord):
if not self.is_upscaled:
return min(max(coord, 0.0), 1.0)
else:
return min(max(coord, 1), 255) | Clamps an RGB coordinate, taking into account whether or not the
color is upscaled or not.
:param float coord: The coordinate value.
:rtype: float
:returns: The clamped value. |
def get_upscaled_value_tuple(self):
# Scale up to 0-255 values.
rgb_r = int(math.floor(0.5 + self.rgb_r * 255))
rgb_g = int(math.floor(0.5 + self.rgb_g * 255))
rgb_b = int(math.floor(0.5 + self.rgb_b * 255))
return rgb_r, rgb_g, rgb_b | Scales an RGB color object from decimal 0.0-1.0 to int 0-255. |
def get_rgb_hex(self):
rgb_r, rgb_g, rgb_b = self.get_upscaled_value_tuple()
return '#%02x%02x%02x' % (rgb_r, rgb_g, rgb_b) | Converts the RGB value to a hex value in the form of: #RRGGBB
:rtype: str |
def new_from_rgb_hex(cls, hex_str):
colorstring = hex_str.strip()
if colorstring[0] == '#':
colorstring = colorstring[1:]
if len(colorstring) != 6:
raise ValueError("input #%s is not in #RRGGBB format" % colorstring)
r, g, b = colorstring[:2], colorstring... | Converts an RGB hex string like #RRGGBB and assigns the values to
this sRGBColor object.
:rtype: sRGBColor |
def delta_e_cie1976(lab_color_vector, lab_color_matrix):
    """
    Calculates the Delta E (CIE1976) between `lab_color_vector` and all
    colors in `lab_color_matrix`.
    """
    # CIE1976 Delta E is plain Euclidean distance in Lab space,
    # computed here row by row against the whole matrix.
    diff = lab_color_vector - lab_color_matrix
    return numpy.sqrt((diff ** 2).sum(axis=1))
def delta_e_cie1994(lab_color_vector, lab_color_matrix,
K_L=1, K_C=1, K_H=1, K_1=0.045, K_2=0.015):
C_1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
C_2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
delta_lab = lab_color_vector - lab_color... | Calculates the Delta E (CIE1994) of two colors.
K_l:
0.045 graphic arts
0.048 textiles
K_2:
0.015 graphic arts
0.014 textiles
K_L:
1 default
2 textiles |
def delta_e_cmc(lab_color_vector, lab_color_matrix, pl=2, pc=1):
L, a, b = lab_color_vector
C_1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
C_2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
delta_lab = lab_color_vector - lab_color_matrix
delta_L = de... | Calculates the Delta E (CIE1994) of two colors.
CMC values
Acceptability: pl=2, pc=1
Perceptability: pl=1, pc=1 |
def delta_e_cie2000(lab_color_vector, lab_color_matrix, Kl=1, Kc=1, Kh=1):
L, a, b = lab_color_vector
avg_Lp = (L + lab_color_matrix[:, 0]) / 2.0
C1 = numpy.sqrt(numpy.sum(numpy.power(lab_color_vector[1:], 2)))
C2 = numpy.sqrt(numpy.sum(numpy.power(lab_color_matrix[:, 1:], 2), axis=1))
avg_C1_C... | Calculates the Delta E (CIE2000) of two colors. |
def ansi_density(color, density_standard):
# Load the spec_XXXnm attributes into a Numpy array.
sample = color.get_numpy_array()
# Matrix multiplication
intermediate = sample * density_standard
# Sum the products.
numerator = intermediate.sum()
# This is the denominator in the density e... | Calculates density for the given SpectralColor using the spectral weighting
function provided. For example, ANSI_STATUS_T_RED. These may be found in
:py:mod:`colormath.density_standards`.
:param SpectralColor color: The SpectralColor object to calculate
density for.
:param numpy.ndarray density... |
def auto_density(color):
blue_density = ansi_density(color, ANSI_STATUS_T_BLUE)
green_density = ansi_density(color, ANSI_STATUS_T_GREEN)
red_density = ansi_density(color, ANSI_STATUS_T_RED)
densities = [blue_density, green_density, red_density]
min_density = min(densities)
max_density = max... | Given a SpectralColor, automatically choose the correct ANSI T filter.
Returns a tuple with a string representation of the filter the
calculated density.
:param SpectralColor color: The SpectralColor object to calculate
density for.
:rtype: float
:returns: The density value, with the filter... |
def _get_lab_color1_vector(color):
if not color.__class__.__name__ == 'LabColor':
raise ValueError(
"Delta E functions can only be used with two LabColor objects.")
return numpy.array([color.lab_l, color.lab_a, color.lab_b]) | Converts an LabColor into a NumPy vector.
:param LabColor color:
:rtype: numpy.ndarray |
def delta_e_cie1976(color1, color2):
    """
    Calculates the Delta E (CIE1976) of two colors.

    :param LabColor color1:
    :param LabColor color2:
    :rtype: float
    """
    color1_vector = _get_lab_color1_vector(color1)
    color2_matrix = _get_lab_color2_matrix(color2)
    delta_e = color_diff_matrix.delta_e_cie1976(color1_vector, color2_matrix)[0]
    # numpy.asscalar was deprecated in NumPy 1.16 and removed in 1.23;
    # ndarray.item() is the supported equivalent.
    return delta_e.item()
def delta_e_cie1994(color1, color2, K_L=1, K_C=1, K_H=1, K_1=0.045, K_2=0.015):
color1_vector = _get_lab_color1_vector(color1)
color2_matrix = _get_lab_color2_matrix(color2)
delta_e = color_diff_matrix.delta_e_cie1994(
color1_vector, color2_matrix, K_L=K_L, K_C=K_C, K_H=K_H, K_1=K_1, K_2=K_2)[0... | Calculates the Delta E (CIE1994) of two colors.
K_l:
0.045 graphic arts
0.048 textiles
K_2:
0.015 graphic arts
0.014 textiles
K_L:
1 default
2 textiles |
def delta_e_cie2000(color1, color2, Kl=1, Kc=1, Kh=1):
    """
    Calculates the Delta E (CIE2000) of two colors.

    :param LabColor color1:
    :param LabColor color2:
    :rtype: float
    """
    color1_vector = _get_lab_color1_vector(color1)
    color2_matrix = _get_lab_color2_matrix(color2)
    delta_e = color_diff_matrix.delta_e_cie2000(
        color1_vector, color2_matrix, Kl=Kl, Kc=Kc, Kh=Kh)[0]
    # numpy.asscalar was deprecated in NumPy 1.16 and removed in 1.23;
    # ndarray.item() is the supported equivalent.
    return delta_e.item()
def delta_e_cmc(color1, color2, pl=2, pc=1):
    """
    Calculates the Delta E (CMC) of two colors.

    CMC values
        Acceptability: pl=2, pc=1
        Perceptability: pl=1, pc=1

    :param LabColor color1:
    :param LabColor color2:
    :rtype: float
    """
    color1_vector = _get_lab_color1_vector(color1)
    color2_matrix = _get_lab_color2_matrix(color2)
    delta_e = color_diff_matrix.delta_e_cmc(
        color1_vector, color2_matrix, pl=pl, pc=pc)[0]
    # numpy.asscalar was deprecated in NumPy 1.16 and removed in 1.23;
    # ndarray.item() is the supported equivalent.
    return delta_e.item()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.