_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3
values | text stringlengths 31 13.1k | language stringclasses 1
value | meta_information dict |
|---|---|---|---|---|---|
q18600 | Logger.warning | train | def warning(self, msg, *args, **kwargs) -> Task: # type: ignore
"""
Log msg with severity 'WARNING'.
To pass exception information, use the | python | {
"resource": ""
} |
q18601 | Logger.error | train | def error(self, msg, *args, **kwargs) -> Task: # type: ignore
"""
Log msg with severity 'ERROR'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g. | python | {
"resource": ""
} |
q18602 | Logger.critical | train | def critical(self, msg, *args, **kwargs) -> Task: # type: ignore
"""
Log msg with severity 'CRITICAL'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g. | python | {
"resource": ""
} |
q18603 | Logger.exception | train | def exception( # type: ignore
self, msg, *args, exc_info=True, **kwargs
) -> Task:
"""
| python | {
"resource": ""
} |
q18604 | validate_aggregation | train | def validate_aggregation(agg):
"""Validate an aggregation for use in Vega-Lite.
Translate agg to one of the following supported named aggregations:
['mean', 'sum', 'median', 'min', 'max', 'count']
Parameters
----------
agg : string or callable
A string
Supported reductions are ['mean', 'sum', 'median', 'min', 'max', 'count'].
If agg is a numpy function, the return value is the string representation.
If agg is unrecognized, raise a ValueError
"""
if agg is None:
return agg
supported_aggs = ['mean', 'sum', 'median', 'min', 'max', 'count']
numpy_aggs = {getattr(np, a): a
| python | {
"resource": ""
} |
q18605 | andrews_curves | train | def andrews_curves(
data, class_column, samples=200, alpha=None, width=450, height=300, **kwds
):
"""
Generates an Andrews curves visualization for visualising clusters of
multivariate data.
Andrews curves have the functional form:
f(t) = x_1/sqrt(2) + x_2 sin(t) + x_3 cos(t) +
x_4 sin(2t) + x_5 cos(2t) + ...
Where x coefficients correspond to the values of each dimension and t is
linearly spaced between -pi and +pi. Each row of frame then corresponds to
a single curve.
Parameters:
-----------
data : DataFrame
Data to be plotted, preferably normalized to (0.0, 1.0)
class_column : string
Name of the column containing class names
samples : integer
Number of points to plot in each curve
alpha: float, optional
The transparency of the lines
width : int, optional
the width of the plot in pixels
height : int, optional
the height of the plot in pixels
**kwds: keywords
Additional options
Returns:
--------
chart: alt.Chart object
"""
if kwds:
warnings.warn(
"Unrecognized keywords in pdvega.andrews_curves(): {0}"
"".format(list(kwds.keys()))
)
t = np.linspace(-np.pi, np.pi, samples)
vals = data.drop(class_column, axis=1).values.T
curves = np.outer(vals[0], np.ones_like(t))
for i in range(1, len(vals)):
ft = ((i + 1) // 2) | python | {
"resource": ""
} |
q18606 | dict_hash | train | def dict_hash(dct):
"""Return a hash of the contents of a dictionary"""
dct_s = json.dumps(dct, sort_keys=True)
try:
m = md5(dct_s) | python | {
"resource": ""
} |
q18607 | exec_then_eval | train | def exec_then_eval(code, namespace=None):
"""Exec a code block & return evaluation of the last line"""
# TODO: make this less brittle.
namespace = namespace or {}
block = ast.parse(code, mode='exec')
last = | python | {
"resource": ""
} |
q18608 | import_obj | train | def import_obj(clsname, default_module=None):
"""
Import the object given by clsname.
If default_module is specified, import from this module.
"""
if default_module is not None:
if not clsname.startswith(default_module + '.'): | python | {
"resource": ""
} |
q18609 | get_scheme_cartocss | train | def get_scheme_cartocss(column, scheme_info):
"""Get TurboCARTO CartoCSS based on input parameters"""
if 'colors' in scheme_info:
color_scheme = '({})'.format(','.join(scheme_info['colors']))
else:
color_scheme = 'cartocolor({})'.format(scheme_info['name'])
if not isinstance(scheme_info['bins'], int):
bins = ','.join(str(i) for i in scheme_info['bins'])
else:
bins = scheme_info['bins']
bin_method = | python | {
"resource": ""
} |
q18610 | custom | train | def custom(colors, bins=None, bin_method=BinMethod.quantiles):
"""Create a custom scheme.
Args:
colors (list of str): List of hex values for styling data
bins (int, optional): Number of bins to style by. If not given, the
number of colors will be used.
bin_method (str, optional): Classification method. One of the values
| python | {
"resource": ""
} |
q18611 | scheme | train | def scheme(name, bins, bin_method='quantiles'):
"""Return a custom scheme based on CARTOColors.
Args:
name (str): Name of a CARTOColor.
bins (int or iterable): If an `int`, the number of bins for classifying
data. CARTOColors have 7 bins max for quantitative data, and 11 max
for qualitative data. If `bins` is a `list`, it is the upper range
for classifying data. E.g., `bins` can be of the form ``(10, 20, 30,
40, 50)``.
bin_method (str, optional): One of methods in :obj:`BinMethod`.
Defaults to ``quantiles``. If `bins` is an iterable, then that is
the bin method that | python | {
"resource": ""
} |
q18612 | CartoContext._is_authenticated | train | def _is_authenticated(self):
"""Checks if credentials allow for authenticated carto access"""
if not self.auth_api_client.is_valid_api_key():
| python | {
"resource": ""
} |
q18613 | CartoContext.read | train | def read(self, table_name, limit=None, decode_geom=False, shared_user=None, retry_times=3):
"""Read a table from CARTO into a pandas DataFrame.
Args:
table_name (str): Name of table in user's CARTO account.
limit (int, optional): Read only `limit` lines from
`table_name`. Defaults to ``None``, which reads the full table.
decode_geom (bool, optional): Decodes CARTO's geometries into a
`Shapely <https://github.com/Toblerity/Shapely>`__
object that can be used, for example, in `GeoPandas
<http://geopandas.org/>`__.
shared_user (str, optional): If a table has been shared with you,
specify the user name (schema) who shared it.
retry_times (int, optional): If the read call is rate limited,
number of retries to be made
Returns:
pandas.DataFrame: DataFrame representation of `table_name` from
CARTO.
Example:
| python | {
"resource": ""
} |
q18614 | CartoContext.tables | train | def tables(self):
"""List all tables in user's CARTO account
Returns:
:obj:`list` of :py:class:`Table <cartoframes.analysis.Table>`
"""
datasets = DatasetManager(self.auth_client).filter(
show_table_size_and_row_count='false',
| python | {
"resource": ""
} |
q18615 | CartoContext.write | train | def write(self, df, table_name, temp_dir=CACHE_DIR, overwrite=False,
lnglat=None, encode_geom=False, geom_col=None, **kwargs):
"""Write a DataFrame to a CARTO table.
Examples:
Write a pandas DataFrame to CARTO.
.. code:: python
cc.write(df, 'brooklyn_poverty', overwrite=True)
Scrape an HTML table from Wikipedia and send to CARTO with content
guessing to create a geometry from the country column. This uses
a CARTO Import API param `content_guessing` parameter.
.. code:: python
url = 'https://en.wikipedia.org/wiki/List_of_countries_by_life_expectancy'
# retrieve first HTML table from that page
df = pd.read_html(url, header=0)[0]
# send to carto, let it guess polygons based on the 'country'
# column. Also set privacy to 'public'
cc.write(df, 'life_expectancy',
content_guessing=True,
privacy='public')
cc.map(layers=Layer('life_expectancy',
color='both_sexes_life_expectancy'))
Args:
df (pandas.DataFrame): DataFrame to write to ``table_name`` in user
CARTO account
table_name (str): Table to write ``df`` to in CARTO.
temp_dir (str, optional): Directory for temporary storage of data
that is sent to CARTO. Defaults are defined by `appdirs
<https://github.com/ActiveState/appdirs/blob/master/README.rst>`__.
overwrite (bool, optional): Behavior for overwriting ``table_name``
if it exists on CARTO. Defaults to ``False``.
lnglat (tuple, optional): lng/lat pair that can be used for
creating a geometry on CARTO. Defaults to ``None``. In some
cases, geometry will be created without specifying this. See
CARTO's `Import API
<https://carto.com/developers/import-api/reference/#tag/Standard-Tables>`__
for more information.
encode_geom (bool, optional): Whether to write `geom_col` to CARTO
as `the_geom`.
geom_col (str, optional): The name of the column where geometry
information is stored. Used in conjunction with `encode_geom`.
**kwargs: Keyword arguments to control write operations. Options
are:
- `compression` to set compression for files sent to CARTO.
| python | {
"resource": ""
} |
q18616 | CartoContext._get_privacy | train | def _get_privacy(self, table_name):
"""gets current privacy of a table"""
ds_manager = DatasetManager(self.auth_client)
| python | {
"resource": ""
} |
q18617 | CartoContext._update_privacy | train | def _update_privacy(self, table_name, privacy):
"""Updates the privacy of a dataset"""
| python | {
"resource": ""
} |
q18618 | CartoContext.fetch | train | def fetch(self, query, decode_geom=False):
"""Pull the result from an arbitrary SELECT SQL query from a CARTO account
into a pandas DataFrame.
Args:
query (str): SELECT query to run against CARTO user database. This data
will then be converted into a pandas DataFrame.
decode_geom (bool, optional): Decodes CARTO's geometries into a
`Shapely <https://github.com/Toblerity/Shapely>`__
object that can be used, for example, in `GeoPandas
<http://geopandas.org/>`__.
Returns:
pandas.DataFrame: DataFrame representation of query supplied.
Pandas data types are inferred from PostgreSQL data types.
In the case of PostgreSQL date types, dates are attempted to be
converted, but on failure a data type 'object' is used.
Examples:
This query gets the 10 highest values from a table and
returns a dataframe.
.. code:: python
topten_df = cc.query(
'''
SELECT * FROM
my_table
ORDER BY value_column DESC
LIMIT 10
'''
)
This query joins points to polygons based on intersection, and
aggregates by summing the values of the points in each polygon. The
query returns a dataframe, with a geometry column that contains
polygons.
.. code:: python
points_aggregated_to_polygons = cc.query(
'''
SELECT polygons.*, sum(points.values)
| python | {
"resource": ""
} |
q18619 | CartoContext.query | train | def query(self, query, table_name=None, decode_geom=False, is_select=None):
"""Pull the result from an arbitrary SQL SELECT query from a CARTO account
into a pandas DataFrame. This is the default behavior, when `is_select=True`
Can also be used to perform database operations (creating/dropping tables,
adding columns, updates, etc.). In this case, you have to explicitly
specify `is_select=False`
This method is a helper for the `CartoContext.fetch` and `CartoContext.execute`
methods. We strongly encourage you to use any of those methods depending on the
type of query you want to run. If you want to get the results of a `SELECT` query
into a pandas DataFrame, then use `CartoContext.fetch`. For any other query that
performs an operation into the CARTO database, use `CartoContext.execute`
Args:
query (str): Query to run against CARTO user database. This data
will then be converted into a pandas DataFrame.
table_name (str, optional): If set (and `is_select=True`), this will create a new
table in the user's CARTO account that is the result of the SELECT
query provided. Defaults to None (no table created).
decode_geom (bool, optional): Decodes CARTO's geometries into a
`Shapely <https://github.com/Toblerity/Shapely>`__
object that can be used, for example, in `GeoPandas
<http://geopandas.org/>`__. It only works for SELECT queries when `is_select=True`
is_select (bool, optional): This argument has to be set depending on the query
performed. True for SELECT queries, False for any other query.
For the case of a SELECT SQL query (`is_select=True`) the result will be stored into a
pandas DataFrame.
When an arbitrary SQL query (`is_select=False`) it will perform a database
operation (UPDATE, DROP, INSERT, etc.)
By default `is_select=None` that means that the method will return a dataframe if
the `query` starts with a `select` clause, otherwise it will just execute the query
and return `None`
Returns:
pandas.DataFrame: When `is_select=True` and the query is actually a SELECT query
this method returns a pandas DataFrame representation of query supplied otherwise
returns None.
Pandas data types are inferred from PostgreSQL data types.
In the case of PostgreSQL date types, dates are attempted to be
converted, but on failure a data type 'object' is used.
Raises:
CartoException: If there's any error when executing the query
Examples:
Query a table in CARTO and write a new table that is the result of the
query. This query gets the 10 highest values from a table and
returns a dataframe, as well as creating a new table called
'top_ten' in the CARTO account.
.. code:: python
topten_df = cc.query(
'''
SELECT * FROM
my_table
ORDER BY value_column DESC
LIMIT 10
''',
table_name='top_ten'
)
This query joins points to polygons based on intersection, and
aggregates by summing the values of the points in each polygon. The
| python | {
"resource": ""
} |
q18620 | CartoContext._check_query | train | def _check_query(self, query, style_cols=None):
"""Checks if query from Layer or QueryLayer is valid"""
try:
self.sql_client.send(
utils.minify_sql((
'EXPLAIN',
'SELECT',
' {style_cols}{comma}',
' the_geom, the_geom_webmercator',
'FROM ({query}) _wrap;',
)).format(query=query,
comma=',' if style_cols else '',
style_cols=(','.join(style_cols)
if style_cols else '')),
do_post=False)
except Exception as err:
raise ValueError(('Layer query `{query}` and/or | python | {
"resource": ""
} |
q18621 | CartoContext._get_bounds | train | def _get_bounds(self, layers):
"""Return the bounds of all data layers involved in a cartoframes map.
Args:
layers (list): List of cartoframes layers. See `cartoframes.layers`
for all types.
Returns:
dict: Dictionary of northern, southern, eastern, and western bounds
of the superset of data layers. Keys are `north`, `south`,
`east`, and `west`. Units are in WGS84.
"""
extent_query = ('SELECT ST_EXTENT(the_geom) AS the_geom '
'FROM ({query}) AS t{idx}\n')
union_query = 'UNION ALL\n'.join(
[extent_query.format(query=layer.orig_query, idx=idx)
for idx, layer in enumerate(layers)
if not layer.is_basemap])
extent = self.sql_client.send(
utils.minify_sql((
'SELECT',
| python | {
"resource": ""
} |
q18622 | vmap | train | def vmap(layers,
context,
size=None,
basemap=BaseMaps.voyager,
bounds=None,
viewport=None,
**kwargs):
"""CARTO VL-powered interactive map
Args:
layers (list of Layer-types): List of layers. One or more of
:py:class:`Layer <cartoframes.contrib.vector.Layer>`,
:py:class:`QueryLayer <cartoframes.contrib.vector.QueryLayer>`, or
:py:class:`LocalLayer <cartoframes.contrib.vector.LocalLayer>`.
context (:py:class:`CartoContext <cartoframes.context.CartoContext>`):
A :py:class:`CartoContext <cartoframes.context.CartoContext>`
instance
size (tuple of int or str): a (width, height) pair for the size of the map.
Default is None, which makes the map 100% wide and 640px tall. If specified as int,
will be used as pixels, but you can also use string values for the CSS attributes.
So, you could specify it as size=('75%', 250).
basemap (str):
- if a `str`, name of a CARTO vector basemap. One of `positron`,
`voyager`, or `darkmatter` from the :obj:`BaseMaps` class
- if a `dict`, Mapbox or other style as the value of the `style` key.
If a Mapbox style, the access token is the value of the `token`
key.
bounds (dict or list): a dict with `east`,`north`,`west`,`south`
properties, or a list of floats in the following order: [west,
south, east, north]. If not provided the bounds will be automatically
calculated to fit all features.
viewport (dict): Configure where and how map will be centered. If not specified, or
specified without lat / lng, automatic bounds or the bounds argument will be used
to center the map. You can specify only zoom, bearing or pitch if you desire
automatic bounds but want to tweak the viewport.
- lng (float): Longitude to center the map on. Must specify lat as well.
- lat (float): Latitude to center the map on. Must specify lng as well.
- zoom (float): Zoom level.
- bearing (float): A bearing, or heading, is the direction you're facing,
measured clockwise as an angle from true north on a compass.
(north is 0, east is 90, south is 180, and west is 270).
- pitch (float): The angle towards the horizon measured in degrees, with a
range between 0 and 60 degrees. Zero degrees results in a two-dimensional
map, as if your line of sight forms a perpendicular angle with
the earth's surface.
Example:
.. code::
from cartoframes.contrib import vector
from cartoframes import CartoContext
cc = CartoContext(
base_url='https://your_user_name.carto.com',
api_key='your api key'
)
vector.vmap([vector.Layer('table in your account'), ], cc)
CARTO basemap style.
.. code::
from cartoframes.contrib import vector
from cartoframes import CartoContext
cc = CartoContext(
base_url='https://your_user_name.carto.com',
api_key='your api key'
)
vector.vmap(
[vector.Layer('table in your account'), ],
context=cc,
basemap=vector.BaseMaps.darkmatter
)
Custom basemap style. Here we use the Mapbox streets style, which
requires an access token.
.. code::
from cartoframes.contrib import vector
from cartoframes import CartoContext
cc = CartoContext(
base_url='https://<username>.carto.com',
api_key='your api key'
)
vector.vmap(
[vector.Layer('table in your account'), ],
context=cc,
| python | {
"resource": ""
} |
q18623 | _get_bounds_local | train | def _get_bounds_local(layers):
"""Aggregates bounding boxes of all local layers
return: dict of bounding box of all bounds in layers
"""
if not layers:
return {'west': None, 'south': None, 'east': None, 'north': None}
bounds = layers[0].bounds
for layer in layers[1:]:
bounds = np.concatenate(
(
np.minimum(
bounds[:2],
| python | {
"resource": ""
} |
q18624 | _combine_bounds | train | def _combine_bounds(bbox1, bbox2):
"""Takes two bounding boxes dicts and gives a new bbox that encompasses
them both"""
WORLD = {'west': -180, 'south': -85.1, 'east': 180, 'north': 85.1}
ALL_KEYS = set(WORLD.keys())
def dict_all_nones(bbox_dict):
"""Returns True if all dict values are None"""
return all(v is None for v in bbox_dict.values())
# if neither are defined, use the world
if not bbox1 and not bbox2:
return WORLD
# if all nones, use the world
if dict_all_nones(bbox1) and dict_all_nones(bbox2):
return WORLD
assert ALL_KEYS == set(bbox1.keys()) and ALL_KEYS == set(bbox2.keys()),\
'Input bounding boxes must have the same dictionary keys'
# create dict with cardinal directions and None-valued keys
outbbox = dict.fromkeys(['west', 'south', 'east', 'north'])
def conv2nan(val):
| python | {
"resource": ""
} |
q18625 | QueryLayer._compose_style | train | def _compose_style(self):
"""Appends `prop` with `style` to layer styling"""
valid_styles = (
'color', 'width', 'filter', 'strokeWidth', 'strokeColor',
| python | {
"resource": ""
} |
q18626 | QueryLayer._set_interactivity | train | def _set_interactivity(self, interactivity):
"""Adds interactivity syntax to the styling"""
event_default = 'hover'
if interactivity is None:
return
if isinstance(interactivity, (tuple, list)):
self.interactivity = event_default
interactive_cols = '\n'.join(
'@{0}: ${0}'.format(col) for col in interactivity
)
elif isinstance(interactivity, str):
self.interactivity = event_default
interactive_cols = '@{0}: ${0}'.format(interactivity)
elif isinstance(interactivity, dict):
self.interactivity = interactivity.get('event', event_default)
self.header = | python | {
"resource": ""
} |
q18627 | get_map_name | train | def get_map_name(layers, has_zoom):
"""Creates a map name based on supplied parameters"""
version = '20170406'
num_layers = len(non_basemap_layers(layers))
has_labels = len(layers) > 1 and layers[-1].is_basemap
has_time = has_time_layer(layers)
basemap_id = dict(light=0, dark=1, voyager=2)[layers[0].source]
return ('cartoframes_ver{version}'
'_layers{layers}'
'_time{has_time}'
'_baseid{baseid}'
'_labels{has_labels}'
'_zoom{has_zoom}').format(
| python | {
"resource": ""
} |
q18628 | get_map_template | train | def get_map_template(layers, has_zoom):
"""Creates a map template based on custom parameters supplied"""
num_layers = len(non_basemap_layers(layers))
has_time = has_time_layer(layers)
name = get_map_name(layers, has_zoom=has_zoom)
# Add basemap layer
layers_field = [{
'type': 'http',
'options': {
# TODO: Remove this once baselayer urls can be passed in named map
# config
'urlTemplate': layers[0].url,
# 'urlTemplate': '<%= basemap_url %>',
'subdomains': "abcd",
},
}]
# [BUG] Remove this once baselayer urls can be passed in named map config
placeholders = {}
# placeholders = {
# 'basemap_url': {
# 'type': 'sql_ident',
# 'default': ('https://cartodb-basemaps-{s}.global.ssl.fastly.net/'
# 'dark_all/{z}/{x}/{y}.png'),
# },
# }
for idx in range(num_layers):
layers_field.extend([{
'type': ('torque' if (has_time and idx == (num_layers - 1))
else 'mapnik'),
'options': {
'cartocss_version': '2.1.1',
'cartocss': '<%= cartocss_{idx} %>'.format(idx=idx),
'sql': '<%= sql_{idx} %>'.format(idx=idx),
# [BUG] No [] for templating
# 'interactivity': '<%= interactivity_{idx} %>'.format(
# idx=idx),
}
}])
placeholders.update({
'cartocss_{idx}'.format(idx=idx): {
'type': 'sql_ident',
'default': ('#layer {'
' marker-fill: red;'
' marker-width: 5;'
' marker-allow-overlap: true;'
' marker-line-color: #000; }'),
},
'sql_{idx}'.format(idx=idx): {
'type': 'sql_ident',
'default': (
"SELECT "
"ST_PointFromText('POINT(0 0)', 4326) AS the_geom, "
"1 AS cartodb_id, "
"ST_PointFromText('Point(0 0)', 3857) AS "
"the_geom_webmercator"
),
},
# [BUG] No [] for templating
# 'interactivity_{idx}'.format(idx=idx): {
# 'type': 'sql_ident',
# 'default': '["cartodb_id"]',
# },
})
# Add labels if they're in front
if num_layers > 0 and layers[-1].is_basemap:
layers_field.extend([{
'type': 'http',
'options': {
# TODO: Remove this once baselayer urls can be passed in named
# map config
'urlTemplate': layers[-1].url,
# 'urlTemplate': '<%= basemap_url %>',
'subdomains': "abcd",
},
}])
if has_zoom:
| python | {
"resource": ""
} |
q18629 | QueryLayer._parse_color | train | def _parse_color(self, color):
"""Setup the color scheme"""
# If column was specified, force a scheme
# It could be that there is a column named 'blue' for example
if isinstance(color, dict):
if 'column' not in color:
raise ValueError("Color must include a 'column' value")
# get scheme if exists. if not, one will be chosen later if needed
scheme = color.get('scheme')
color = color['column']
self.style_cols[color] = None
elif (color and
color[0] != '#' | python | {
"resource": ""
} |
q18630 | QueryLayer._parse_time | train | def _parse_time(self, time):
"""Parse time inputs"""
if time is None:
return None
if isinstance(time, dict):
if 'column' not in time:
raise ValueError("`time` must include a 'column' key/value")
time_column = time['column']
time_options = time
elif isinstance(time, str):
time_column = time
time_options = {}
else:
raise ValueError(
'`time` should be | python | {
"resource": ""
} |
q18631 | QueryLayer._parse_size | train | def _parse_size(self, size, has_time=False):
"""Parse size inputs"""
if has_time:
size = size or 4
else:
size = size or 10
if isinstance(size, str):
size = {'column': size}
if isinstance(size, dict):
if 'column' not in size:
raise ValueError("`size` must include a 'column' key/value")
if has_time:
raise ValueError("When time is specified, size can "
"only be a fixed size")
old_size = size
# Default size range, bins, and bin_method
size = {
'range': [5, 25],
'bins': 5,
'bin_method': BinMethod.quantiles,
}
# Assign default range and update if min/max given
| python | {
"resource": ""
} |
q18632 | QueryLayer._validate_columns | train | def _validate_columns(self):
"""Validate the options in the styles"""
geom_cols = {'the_geom', 'the_geom_webmercator', }
col_overlap = set(self.style_cols) & geom_cols
if col_overlap:
raise ValueError('Style columns cannot be geometry '
| python | {
"resource": ""
} |
q18633 | QueryLayer._setup | train | def _setup(self, layers, layer_idx):
"""Sets up layers once geometry types and data types are known, and when
a map is requested to be rendered from zero or more data layers"""
basemap = layers[0]
# if color not specified, choose a default
if self.time:
# default time/torque color
self.color = self.color or '#2752ff'
else:
self.color = self.color or DEFAULT_COLORS[layer_idx]
if isinstance(self.size, (int, float)):
if self.geom_type == 'point':
self.size = self.size or 4
| python | {
"resource": ""
} |
q18634 | QueryLayer._choose_scheme | train | def _choose_scheme(self):
"""Choose color scheme"""
if self.style_cols[self.color] in ('string', 'boolean', ):
self.scheme = antique(10)
elif self.style_cols[self.color] in ('number', ):
self.scheme = mint(5)
elif self.style_cols[self.color] in ('date', 'geometry', ):
| python | {
"resource": ""
} |
q18635 | normalize_names | train | def normalize_names(column_names):
"""Given an arbitrary column name, translate to a SQL-normalized column
name a la CARTO's Import API will translate to
Examples
* 'Field: 2' -> 'field_2'
* '2 Items' -> '_2_items'
* 'Unnamed: 0' -> 'unnamed_0',
* '201moore' -> '_201moore',
* '201moore' -> '_201moore_1',
* 'Acadia 1.2.3' -> 'acadia_1_2_3',
* 'old_soaker' -> 'old_soaker',
* '_testingTesting' -> '_testingtesting',
* 1 -> '_1',
* 1.0 -> '_1_0',
* 'public' -> 'public',
* 'SELECT' -> '_select',
* 'à' -> 'a',
* 'longcolumnshouldbesplittedsomehowanditellyouwhereitsgonnabesplittedrightnow' -> \
| python | {
"resource": ""
} |
q18636 | cssify | train | def cssify(css_dict):
"""Function to get CartoCSS from Python dicts"""
css = ''
for key, value in dict_items(css_dict):
css += '{key} {{ '.format(key=key)
for field, field_value in dict_items(value):
| python | {
"resource": ""
} |
q18637 | temp_ignore_warnings | train | def temp_ignore_warnings(func):
"""Temporarily ignores warnings like those emitted by the carto python sdk
"""
@wraps(func)
def wrapper(*args, **kwargs):
"""wrapper around func to filter/reset warnings"""
with catch_warnings():
| python | {
"resource": ""
} |
q18638 | get_columns | train | def get_columns(context, query):
"""Get list of cartoframes.columns.Column"""
table_info = context.sql_client.send(query)
if 'fields' in table_info:
| python | {
"resource": ""
} |
q18639 | get_column_names | train | def get_column_names(context, query):
"""Get column names and types from a query"""
table_info = context.sql_client.send(query)
if 'fields' in | python | {
"resource": ""
} |
q18640 | _encode_decode_decorator | train | def _encode_decode_decorator(func):
"""decorator for encoding and decoding geoms"""
def wrapper(*args):
"""error catching"""
try:
processed_geom = func(*args)
return processed_geom
except ImportError as err:
| python | {
"resource": ""
} |
q18641 | _decode_geom | train | def _decode_geom(ewkb):
"""Decode encoded wkb into a shapely geometry
"""
# it's already a shapely object
if hasattr(ewkb, 'geom_type'):
return ewkb
from shapely import wkb
from shapely import wkt
if ewkb:
try:
return wkb.loads(ba.unhexlify(ewkb))
except Exception:
| python | {
"resource": ""
} |
q18642 | Dataset.exists | train | def exists(self):
"""Checks to see if table exists"""
try:
self.cc.sql_client.send(
'EXPLAIN SELECT * FROM "{table_name}"'.format(
table_name=self.table_name),
do_post=False)
| python | {
"resource": ""
} |
q18643 | Credentials.save | train | def save(self, config_loc=None):
"""Saves current user credentials to user directory.
Args:
config_loc (str, optional): Location where credentials are to be
stored. If no argument is provided, it will be send to the
default location.
Example:
.. code::
from cartoframes import Credentials
creds = Credentials(username='eschbacher', key='abcdefg')
creds.save() # save to default location
""" | python | {
"resource": ""
} |
q18644 | Credentials._retrieve | train | def _retrieve(self, config_file=None):
"""Retrieves credentials from a file. Defaults to the user config
directory"""
with open(config_file or | python | {
"resource": ""
} |
q18645 | Credentials.delete | train | def delete(self, config_file=None):
"""Deletes the credentials file specified in `config_file`. If no
file is specified, it deletes the default user credential file.
Args:
config_file (str): Path to configuration file. Defaults to delete
the user default location if `None`.
.. Tip::
To see if there is a default user credential file stored, do the
following::
>>> creds = Credentials()
>>> print(creds)
Credentials(username=eschbacher, | python | {
"resource": ""
} |
q18646 | Credentials.set | train | def set(self, key=None, username=None, base_url=None):
"""Update the credentials of a Credentials instance with new
values.
Args:
key (str): API key of user account. Defaults to previous value if
not specified.
username (str): User name of account. This parameter is optional if
`base_url` is not specified, but defaults to the previous
value if not set.
base_url (str): Base URL of user account. This parameter is
optional if `username` is specified and on CARTO's
cloud-based account. Generally of the form
``https://your_user_name.carto.com/`` for cloud-based accounts.
If on-prem or otherwise, contact your admin.
Example:
.. code::
from cartoframes import Credentials
# load credentials saved in previous session
| python | {
"resource": ""
} |
q18647 | Credentials.base_url | train | def base_url(self, base_url=None):
"""Return or set `base_url`.
Args:
base_url (str, optional): If set, updates the `base_url`. Otherwise
returns current `base_url`.
Note:
This does not update the `username` attribute. Separately update
the username with ``Credentials.username`` or update `base_url` and
`username` at the same time with ``Credentials.set``.
Example:
.. code::
>>> from cartoframes import Credentials
# load credentials saved in previous session
>>> creds = Credentials()
# returns current base_url
>>> creds.base_url()
'https://eschbacher.carto.com/'
# updates base_url with new value
>>> creds.base_url('new_base_url') | python | {
"resource": ""
} |
q18648 | chat | train | def chat(room=None, stream=None, **kwargs):
"""Quick setup for a chatroom.
:param str room: Roomname, if not given, a random sequence is generated and printed.
:param MediaStream stream: The media stream to share, if not given a CameraStream will be created.
| python | {
"resource": ""
} |
q18649 | SceneGraph.add_child | train | def add_child(self, child):
"""Adds an object as a child in the scene graph."""
if not issubclass(child.__class__, SceneGraph):
raise TypeError("child must have | python | {
"resource": ""
} |
q18650 | ProjectionBase.copy | train | def copy(self):
"""Returns a copy of the projection matrix"""
params = {}
for key, val in self.__dict__.items():
if 'matrix' not in key:
k = key[1:] if key[0] == '_' else key
| python | {
"resource": ""
} |
q18651 | PerspectiveProjection.match_aspect_to_viewport | train | def match_aspect_to_viewport(self):
"""Updates Camera.aspect to match the viewport's aspect ratio."""
| python | {
"resource": ""
} |
q18652 | Camera.to_pickle | train | def to_pickle(self, filename):
"""Save Camera to a pickle file, given a filename."""
| python | {
"resource": ""
} |
q18653 | Camera.from_pickle | train | def from_pickle(cls, filename):
"""Loads and Returns a Camera from a pickle file, given a filename."""
with open(filename, 'rb') as f:
cam = pickle.load(f)
projection = cam.projection.copy()
| python | {
"resource": ""
} |
q18654 | CameraGroup.look_at | train | def look_at(self, x, y, z):
"""Converges the two cameras to look | python | {
"resource": ""
} |
q18655 | FBO.bind | train | def bind(self):
"""Bind the FBO. Anything drawn afterward will be stored in the FBO's texture."""
# This is called simply to deal with anything that might be currently bound (for example, Pyglet objects),
gl.glBindTexture(gl.GL_TEXTURE_2D, 0)
# Store current viewport size for later
self._old_viewport = get_viewport()
| python | {
"resource": ""
} |
q18656 | FBO.unbind | train | def unbind(self):
"""Unbind the FBO."""
# Unbind the FBO
if self.texture.mipmap:
with self.texture:
self.texture.generate_mipmap()
| python | {
"resource": ""
} |
q18657 | create_opengl_object | train | def create_opengl_object(gl_gen_function, n=1):
"""Returns int pointing to an OpenGL texture"""
handle = gl.GLuint(1)
gl_gen_function(n, byref(handle)) # Create n Empty Objects
if n > 1:
| python | {
"resource": ""
} |
q18658 | vec | train | def vec(data, dtype=float):
""" Makes GLfloat or GLuint vector containing float or uint args.
By default, newtype is 'float', but can be set to 'int' to make
uint list. """
gl_types = {float: gl.GLfloat, int: gl.GLuint}
try:
| python | {
"resource": ""
} |
q18659 | calculate_normals | train | def calculate_normals(vertices):
"""Return Nx3 normal array from Nx3 vertex array."""
verts = np.array(vertices, dtype=float)
normals = np.zeros_like(verts)
for start, end in pairwise(np.arange(0, verts.shape[0] + 1, 3)):
vecs = np.vstack((verts[start | python | {
"resource": ""
} |
q18660 | Scene.draw | train | def draw(self, clear=True):
"""Draw each visible mesh in the scene from the perspective of the scene's camera and lit by its light."""
if clear:
self.clear()
with self.gl_states, self.camera, self.light:
| python | {
"resource": ""
} |
q18661 | Scene.draw360_to_texture | train | def draw360_to_texture(self, cubetexture, **kwargs):
"""
Draw each visible mesh in the scene from the perspective of the scene's camera and lit by its light, and
applies it to each face of cubetexture, which should be currently bound to an FBO.
"""
assert self.camera.projection.aspect == 1. and self.camera.projection.fov_y == 90 # todo: fix aspect property, which currently reads from viewport.
if not isinstance(cubetexture, TextureCube):
raise ValueError("Must render to TextureCube")
# for face, rotation in enumerate([[180, 90, 0], [180, -90, 0], [90, 0, 0], [-90, 0, 0], [180, 0, 0], [0, 0, 180]]):
old_rotation = self.camera.rotation
| python | {
"resource": ""
} |
q18662 | VAO.assign_vertex_attrib_location | train | def assign_vertex_attrib_location(self, vbo, location):
"""Load data into a vbo"""
with vbo:
if self.n_verts:
assert vbo.data.shape[0] == self.n_verts
else:
self.n_verts = vbo.data.shape[0]
| python | {
"resource": ""
} |
q18663 | Mesh.copy | train | def copy(self):
"""Returns a copy of the Mesh."""
return Mesh(arrays=deepcopy([arr.copy() for arr in [self.vertices, self.normals, self.texcoords]]), texture=self.textures, mean_center=deepcopy(self._mean_center),
position=self.position.xyz, rotation=self.rotation.__class__(*self.rotation[:]), scale=self.scale.xyz,
| python | {
"resource": ""
} |
q18664 | Mesh.from_pickle | train | def from_pickle(cls, filename):
"""Loads and Returns a Mesh from a pickle file, given | python | {
"resource": ""
} |
q18665 | Mesh.reset_uniforms | train | def reset_uniforms(self):
""" Resets the uniforms to the Mesh object to the ""global"" coordinate system"""
| python | {
"resource": ""
} |
q18666 | Mesh._fill_vao | train | def _fill_vao(self):
"""Put array location in VAO for shader in same order as arrays given to Mesh."""
with self.vao:
self.vbos = []
for loc, verts in enumerate(self.arrays):
| python | {
"resource": ""
} |
q18667 | Mesh.draw | train | def draw(self):
""" Draw the Mesh if it's visible, from the perspective of the camera and lit by the light. The function sends the uniforms"""
if not self.vao:
self.vao = VAO(indices=self.array_indices)
self._fill_vao()
if self.visible:
if self.dynamic:
for vbo in self.vbos:
vbo._buffer_subdata()
if self.drawmode == gl.GL_POINTS:
| python | {
"resource": ""
} |
q18668 | cross_product_matrix | train | def cross_product_matrix(vec):
"""Returns a 3x3 cross-product matrix from a 3-element vector."""
return np.array([[0, -vec[2], vec[1]],
| python | {
"resource": ""
} |
q18669 | Texture.max_texture_limit | train | def max_texture_limit(self):
"""The maximum number of textures available for this graphic card's fragment shader."""
max_unit_array = (gl.GLint * 1)()
| python | {
"resource": ""
} |
q18670 | Texture._apply_filter_settings | train | def _apply_filter_settings(self):
"""Applies some hard-coded texture filtering settings."""
# TODO: Allow easy customization of filters
if self.mipmap:
gl.glTexParameterf(self.target, gl.GL_TEXTURE_MIN_FILTER, gl.GL_LINEAR_MIPMAP_LINEAR)
| python | {
"resource": ""
} |
q18671 | Texture.attach_to_fbo | train | def attach_to_fbo(self):
"""Attach the texture to a bound FBO object, for rendering to texture."""
| python | {
"resource": ""
} |
q18672 | Texture.from_image | train | def from_image(cls, img_filename, mipmap=False, **kwargs):
"""Uses Pyglet's image.load function to generate a Texture from an image file. If 'mipmap', then texture will
| python | {
"resource": ""
} |
q18673 | TextureCube._genTex2D | train | def _genTex2D(self):
"""Generate an empty texture in OpenGL"""
for face in range(6):
gl.glTexImage2D(self.target0 + face, | python | {
"resource": ""
} |
q18674 | WavefrontReader.get_mesh | train | def get_mesh(self, body_name, **kwargs):
"""Builds Mesh from geom name in the wavefront file. Takes all keyword arguments that Mesh takes."""
body = self.bodies[body_name]
vertices = body['v']
normals = body['vn'] if 'vn' in body else None
texcoords = body['vt'] if 'vt' in body else None
mesh = Mesh.from_incomplete_data(vertices=vertices, normals=normals, texcoords=texcoords, **kwargs)
uniforms = kwargs['uniforms'] if 'uniforms' in kwargs else {}
if 'material' in body:
material_props = {self.material_property_map[key]: value for key, value in iteritems(body['material'])}
for key, value in iteritems(material_props):
if isinstance(value, str):
if key == 'map_Kd':
if not value in self.textures:
self.textures[value] = Texture.from_image(value)
mesh.textures.append(self.textures[value])
| python | {
"resource": ""
} |
q18675 | UniformCollection.send | train | def send(self):
"""
Sends all the key-value pairs to the graphics card.
These uniform variables will be available in the currently-bound shader.
"""
for name, array in iteritems(self):
shader_id = c_int(0)
gl.glGetIntegerv(gl.GL_CURRENT_PROGRAM, byref(shader_id))
if shader_id.value == 0:
raise UnboundLocalError("""Shader not bound to OpenGL context--uniform cannot be sent.
------------ Tip -------------
with ratcave.default_shader:
mesh.draw()
------------------------------
""")
# Attach a shader location value to the array, for quick memory lookup. (gl calls are expensive, for some reason)
try:
loc, shader_id_for_array = array.loc
if shader_id.value != shader_id_for_array:
raise Exception('Uniform location bound to a different shader')
except (AttributeError, Exception) as e:
array.loc = (gl.glGetUniformLocation(shader_id.value, name.encode('ascii')), shader_id.value)
| python | {
"resource": ""
} |
q18676 | Shader.bind | train | def bind(self):
"""Activate this Shader, making it the currently-bound program.
Any Mesh.draw() calls after bind() will have their data processed by this Shader. To unbind, call Shader.unbind().
Example::
shader.bind()
mesh.draw()
| python | {
"resource": ""
} |
q18677 | Shader.from_file | train | def from_file(cls, vert, frag, **kwargs):
"""
Reads the shader programs, given the vert and frag filenames
Arguments:
- vert (str): The filename of the vertex shader program (ex: 'vertshader.vert')
- frag (str): The filename of the fragment shader program (ex: 'fragshader.frag')
Returns:
| python | {
"resource": ""
} |
q18678 | Shader.link | train | def link(self):
"""link the program, making it the active shader.
.. note:: Shader.bind() is preferred here, because link() Requires the Shader to be compiled already.
"""
gl.glLinkProgram(self.id)
# Check if linking was successful. If not, print the log.
link_status = c_int(0)
gl.glGetProgramiv(self.id, gl.GL_LINK_STATUS, byref(link_status))
if not link_status:
gl.glGetProgramiv(self.id, gl.GL_INFO_LOG_LENGTH, | python | {
"resource": ""
} |
q18679 | PhysicalGraph.add_child | train | def add_child(self, child, modify=False):
""" Adds an object as a child in the scene graph. With modify=True, model_matrix_transform gets change from identity and prevents the changes of the coordinates of the child"""
SceneGraph.add_child(self, child)
self.notify()
if modify:
| python | {
"resource": ""
} |
q18680 | Example.setup_tree | train | def setup_tree(self):
"""Setup an example Treeview"""
self.tree.insert("", tk.END, text="Example 1", iid="1")
self.tree.insert("", tk.END, text="Example 2", iid="2")
| python | {
"resource": ""
} |
q18681 | Example.grid_widgets | train | def grid_widgets(self):
"""Put widgets in the grid"""
sticky = {"sticky": "nswe"}
self.label.grid(row=1, column=1, columnspan=2, **sticky)
self.dropdown.grid(row=2, column=1, **sticky)
self.entry.grid(row=2, column=2, **sticky)
self.button.grid(row=3, column=1, columnspan=2, **sticky)
self.radio_one.grid(row=4, column=1, **sticky)
self.radio_two.grid(row=4, column=2, **sticky)
self.checked.grid(row=5, column=1, **sticky)
self.unchecked.grid(row=5, column=2, **sticky)
| python | {
"resource": ""
} |
q18682 | Example.screenshot | train | def screenshot(self, *args):
"""Take a screenshot, crop and save"""
from mss import mss
if not os.path.exists("screenshots"):
os.makedirs("screenshots")
box = {
"top": self.winfo_y(),
"left": self.winfo_x(),
"width": self.winfo_width(),
| python | {
"resource": ""
} |
q18683 | Example.screenshot_themes | train | def screenshot_themes(self, *args):
"""Take a screenshot for all themes available"""
from time import sleep
for theme in THEMES:
| python | {
"resource": ""
} |
q18684 | ThemedWidget._load_themes | train | def _load_themes(self):
"""Load the themes into the Tkinter interpreter"""
with utils.temporary_chdir(utils.get_file_directory()):
self._append_theme_dir("themes")
self.tk.eval("source themes/pkgIndex.tcl")
theme_dir = "gif" if not self.png_support else "png"
| python | {
"resource": ""
} |
q18685 | ThemedWidget._append_theme_dir | train | def _append_theme_dir(self, name):
"""Append a theme dir to the Tk interpreter auto_path"""
| python | {
"resource": ""
} |
q18686 | ThemedWidget.set_theme | train | def set_theme(self, theme_name):
"""
Set new theme to use. Uses a direct tk call to allow usage
of the themes supplied with this package.
:param theme_name: name of | python | {
"resource": ""
} |
q18687 | ThemedWidget.set_theme_advanced | train | def set_theme_advanced(self, theme_name, brightness=1.0,
saturation=1.0, hue=1.0,
preserve_transparency=True, output_dir=None,
advanced_name="advanced"):
"""
Load an advanced theme that is dynamically created
Applies the given modifiers to the images of the theme given and
then creates a theme from these new images with the name
'advanced' and then applies this theme. Is not available without
support for PNG-based themes, then raises RuntimeError.
"""
if not self.png_support:
raise RuntimeError("PNG-based themes are not supported in the environment")
# Check if the theme is a pixmap theme
if theme_name not in self.pixmap_themes:
raise ValueError("Theme is not a valid pixmap theme")
# Check if theme is available in the first place
if theme_name not in self.themes:
raise ValueError("Theme to create new theme from is not available: {}".format(theme_name))
if advanced_name in self.themes:
raise RuntimeError("The same name for an advanced theme cannot be used twice")
# Unload advanced if already loaded
output_dir | python | {
"resource": ""
} |
q18688 | ThemedWidget._setup_advanced_theme | train | def _setup_advanced_theme(self, theme_name, output_dir, advanced_name):
"""
Setup all the files required to enable an advanced theme.
Copies all the files over and creates the required directories
if they do not exist.
:param theme_name: theme to copy the files over from
:param output_dir: output directory to place the files in
"""
"""Directories"""
output_theme_dir = os.path.join(output_dir, advanced_name)
output_images_dir = os.path.join(output_theme_dir, advanced_name)
input_theme_dir = os.path.join(
utils.get_themes_directory(theme_name, self.png_support), theme_name)
input_images_dir = os.path.join(input_theme_dir, theme_name)
advanced_pkg_dir = os.path.join(utils.get_file_directory(), "advanced")
"""Directory creation"""
for directory in [output_dir, output_theme_dir]:
utils.create_directory(directory)
"""Theme TCL file"""
file_name = theme_name + ".tcl"
theme_input = os.path.join(input_theme_dir, file_name)
theme_output = os.path.join(output_theme_dir, "{}.tcl".format(advanced_name))
with open(theme_input, "r") as fi, open(theme_output, "w") as fo:
for line in fi:
# Setup new theme
line = line.replace(theme_name, advanced_name)
# Setup new image format
| python | {
"resource": ""
} |
q18689 | ThemedWidget._setup_images | train | def _setup_images(directory, brightness, saturation, hue, preserve_transparency):
"""
Apply modifiers to the images of a theme
Modifies the images using the PIL.ImageEnhance module. Using
this function, theme images are modified to given them a
unique look and feel. Works best with PNG-based images.
"""
for file_name in os.listdir(directory):
with open(os.path.join(directory, file_name), "rb") as fi:
image = Image.open(fi).convert("RGBA")
# Only perform required operations
if brightness != 1.0:
enhancer = ImageEnhance.Brightness(image)
image = enhancer.enhance(brightness)
if saturation != 1.0:
enhancer = ImageEnhance.Color(image)
image = enhancer.enhance(saturation)
if hue != 1.0:
| python | {
"resource": ""
} |
q18690 | ThemedTk.set_theme | train | def set_theme(self, theme_name, toplevel=None, themebg=None):
"""Redirect the set_theme call to also set Tk background color"""
if self._toplevel is not None and toplevel is None:
toplevel = self._toplevel
if self._themebg is not None and themebg is None:
themebg = self._themebg
| python | {
"resource": ""
} |
q18691 | ThemedTk.config | train | def config(self, kw=None, **kwargs):
"""configure redirect to support additional options"""
themebg = kwargs.pop("themebg", self._themebg)
toplevel = kwargs.pop("toplevel", self._toplevel)
theme = kwargs.pop("theme", self.current_theme)
color = self._get_bg_color()
if themebg != self._themebg:
if themebg is False:
self.configure(bg="white")
else:
self.configure(bg=color)
self._themebg = themebg
| python | {
"resource": ""
} |
q18692 | ThemedTk.cget | train | def cget(self, k):
"""cget redirect to support additional options"""
if k == "themebg":
return self._themebg
| python | {
"resource": ""
} |
q18693 | build_and_install_wheel | train | def build_and_install_wheel(python):
"""Build a binary distribution wheel and install it"""
dist_type = "bdist_wheel" if not SDIST else "sdist"
return_code = run_command("{} setup.py {}".format(python, dist_type))
if return_code != 0:
print("Building and installing wheel failed.")
exit(return_code)
# Check if an artifact exists
assert check_wheel_existence()
print("Wheel file exists.")
# Install the wheel file
wheel = [file for file in | python | {
"resource": ""
} |
q18694 | ci | train | def ci(python="python", codecov="codecov", coverage_file="coverage.xml", wheel=True):
"""
Run the most common CI tasks
"""
# Import pip
from pip import __version__ as pip_version
if Version(pip_version) >= Version("10.0.0"):
import pip._internal as pip
else:
import pip
# Install requirements with pip
pip.main(["install"] + DEPENDENCIES + REQUIREMENTS + ["-U"])
# Build the installation wheel
if wheel is True:
build_and_install_wheel(python)
# Remove all non-essential files
for to_delete in TO_DELETE:
rmtree(to_delete)
# Run the tests on the installed ttkthemes
return_code | python | {
"resource": ""
} |
q18695 | ci_macos | train | def ci_macos():
"""
Setup Travis-CI macOS for wheel building
"""
run_command("brew install $PYTHON pipenv || echo \"Installed PipEnv\"")
command_string = "sudo -H $PIP | python | {
"resource": ""
} |
q18696 | ThemedStyle.theme_use | train | def theme_use(self, theme_name=None):
"""
Set a new theme to use or return current theme name
:param theme_name: name of | python | {
"resource": ""
} |
q18697 | get_temp_directory | train | def get_temp_directory():
"""Return an absolute path to an existing temporary directory"""
# Supports all platforms supported by tempfile
directory = | python | {
"resource": ""
} |
q18698 | create_directory | train | def create_directory(directory):
"""Create directory but first delete it if it exists""" | python | {
"resource": ""
} |
q18699 | make_transparent | train | def make_transparent(image):
"""Turn all black pixels in an image into transparent ones"""
data = image.copy().getdata()
modified = []
for item in | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.