text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def process_image(path, bands=None, verbose=False, pansharpen=False, ndvi=False, force_unzip=None, ndvigrey=False, bounds=None):
    """Construct the appropriate image processor for *path* and run it.

    :param path: path to the image that has to be processed
    :type path: String
    :param bands: band numbers to process (optional)
    :type bands: List
    :param verbose: verbosity level, default False
    :type verbose: boolean
    :param pansharpen: whether to pansharpen the image, default False
    :type pansharpen: boolean
    :param ndvi: build an NDVI product with a manual color map
    :param force_unzip: re-extract the archive even if already unzipped
    :param ndvigrey: build a greyscale NDVI product
    :param bounds: optional clipping bounds
    :returns: (String) path to the processed image
    """
    try:
        bands = convert_to_integer_list(bands)
        # Pick the processor matching the requested product type.
        if pansharpen:
            processor = PanSharpen(path, bands=bands, dst_path=settings.PROCESSED_IMAGE,
                                   verbose=verbose, force_unzip=force_unzip, bounds=bounds)
        elif ndvigrey:
            processor = NDVI(path, verbose=verbose, dst_path=settings.PROCESSED_IMAGE,
                             force_unzip=force_unzip, bounds=bounds)
        elif ndvi:
            processor = NDVIWithManualColorMap(path, dst_path=settings.PROCESSED_IMAGE,
                                               verbose=verbose, force_unzip=force_unzip, bounds=bounds)
        else:
            processor = Simple(path, bands=bands, dst_path=settings.PROCESSED_IMAGE,
                               verbose=verbose, force_unzip=force_unzip, bounds=bounds)
    except (IOError, FileDoesNotExist) as err:
        exit(str(err), 1)
    return processor.run()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _read_bands(self):
    """Read every band listed in ``self.bands`` with rasterio.

    :returns: list of 2D arrays, one per band in ``self.bands``
    """
    bands = []
    try:
        for i, band in enumerate(self.bands):
            bands.append(rasterio.open(self.bands_path[i]).read_band(1))
    except IOError as e:
        # BaseException.message was removed in Python 3; str(e) is
        # equivalent and works on both interpreter lines.
        exit(str(e), 1)
    return bands
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _unzip(self, src, dst, scene, force_unzip=False):
    """Extract the scene tarball *src* into the directory *dst*."""
    self.output("Unzipping %s - It might take some time" % scene, normal=True, arrow=True)
    try:
        # check if file is already unzipped, skip
        if isdir(dst) and not force_unzip:
            self.output('%s is already unzipped.' % scene, normal=True, color='green', indent=1)
            return
        with tarfile.open(src, 'r') as tar:
            tar.extractall(path=dst)
    except tarfile.ReadError:
        # Archive format the tarfile module can't read; fall back to system tar.
        check_create_folder(dst)
        subprocess.check_call(['tar', '-xf', src, '-C', dst])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _filename(self, name=None, suffix=None, prefix=None):
    """Build the output file name for a processed image.

    Falls back to ``self.scene`` when *name* is not given; appends an
    encoded-bounds marker when the image was clipped.
    """
    parts = []
    if prefix:
        parts.append(str(prefix))
    parts.append(str(name) if name else str(self.scene))
    if suffix:
        parts.append(str(suffix))
    filename = '_'.join(parts)
    if self.clipped:
        # Encode the clip bounds into the name so outputs don't collide.
        bounds = [tuple(self.bounds[0:2]), tuple(self.bounds[2:4])]
        filename += '_clipped_' + PolylineCodec().encode(bounds)
    return filename + '.TIF'
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(self):
    """Execute the image processing.

    :returns: (String) the path to the processed image
    """
    self.output('Image processing started for bands %s' % '-'.join(map(str, self.bands)), normal=True, arrow=True)
    bands = self._read_bands()
    image_data = self._get_image_data()
    # Allocate destination arrays matching the source raster shape,
    # then reproject the source pixels into them.
    new_bands = self._generate_new_bands(image_data['shape'])
    self._warp(image_data, bands, new_bands)
    # Bands are no longer needed
    del bands
    # Output profile: 3-band uint8 RGB GeoTIFF, 0 marks nodata.
    rasterio_options = {
        'driver': 'GTiff',
        'width': image_data['shape'][1],
        'height': image_data['shape'][0],
        'count': 3,
        'dtype': numpy.uint8,
        'nodata': 0,
        'transform': image_data['dst_transform'],
        'photometric': 'RGB',
        'crs': self.dst_crs
    }
    return self._write_to_file(new_bands, **rasterio_options)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def output(self, value, normal=False, color=None, error=False, arrow=False, indent=None):
    """Handle the verbosity of printed messages.

    :param value: the message to be printed
    :param normal: when True the message is always printed, otherwise it
        only shows when the instance is verbose
    :param color: message color ('red', 'green', 'blue')
    :param error: when True the message renders in red
    :param arrow: when True an arrow is prepended to the message
    :param indent: number of indentation levels for the message
    :returns: the rendered message, or None when suppressed
    """
    should_show = normal or self.verbose
    if error and value and should_show:
        # Errors are always red; the arrow flag is intentionally ignored.
        return self._print(value, color='red', indent=indent)
    if should_show:
        return self._print(value, color, arrow, indent)
    return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def subprocess(self, argv):
    """Run *argv* as a subprocess, surfacing its stderr via ``output``.

    This is no longer used in landsat-util.
    :param argv: list of subprocess arguments
    :returns: void
    """
    if self.verbose:
        popen_kwargs = {'stderr': subprocess.PIPE}
    else:
        # Swallow stdout entirely when not verbose.
        popen_kwargs = {'stdout': subprocess.PIPE, 'stderr': subprocess.PIPE}
    proc = subprocess.Popen(argv, **popen_kwargs)
    self.output(proc.stderr.read(), error=True)
    return
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def exit(self, message):
    """Print *message* (always shown, in green) and terminate the process.

    :param message: the message to be printed
    :returns: void (the interpreter exits)
    """
    self.output(message, color="green", normal=True)
    sys.exit()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _print(self, msg, color=None, arrow=False, indent=None):
    """Print *msg* with optional color, arrow prefix, and indentation.

    :returns: the decorated message that was printed
    """
    text = colored(msg, color) if color else msg
    if arrow:
        text = colored('===> ', 'blue') + text
    if indent:
        text = (' ' * indent) + text
    print(text)
    return text
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def nn_symbols():
    """Query the names and values of all nanomsg symbols.

    :returns: list of (name, value) tuples
    """
    sym_value = ctypes.c_int()
    pairs = []
    index = 0
    while True:
        sym_name = _nn_symbol(index, ctypes.byref(sym_value))
        if sym_name is None:
            # The C API returns NULL past the last symbol.
            break
        index += 1
        pairs.append((sym_name.decode('ascii'), sym_value.value))
    return pairs
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def nn_setsockopt(socket, level, option, value):
    """Set a socket option.

    socket - socket number
    level - option level
    option - option
    value - a readable byte buffer (not a Unicode string) containing the value
    returns - 0 on success or < 0 on error
    """
    try:
        # Fast path: value is already a ctypes instance.
        return _nn_setsockopt(socket, level, option,
                              ctypes.addressof(value), len(value))
    except (TypeError, AttributeError):
        # Not a ctypes object; copy it into a C string buffer first.
        c_buf = ctypes.create_string_buffer(value)
        return _nn_setsockopt(socket, level, option,
                              ctypes.addressof(c_buf), len(value))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def nn_getsockopt(socket, level, option, value):
    """Retrieve a socket option.

    socket - socket number
    level - option level
    option - option
    value - a writable byte buffer which the option value will be copied to
    returns - (rtn, size) where rtn is the bytes copied or < 0 on error
    """
    if memoryview(value).readonly:
        raise TypeError('Writable buffer is required')
    size_t_size = ctypes.c_size_t(len(value))
    # NOTE(review): ctypes.addressof accepts only ctypes instances; a plain
    # bytearray would raise TypeError here despite the memoryview check
    # above — confirm what buffer type call sites actually pass.
    rtn = _nn_getsockopt(socket, level, option, ctypes.addressof(value),
                         ctypes.byref(size_t_size))
    # size_t_size is updated in place by the C call with the copied length.
    return (rtn, size_t_size.value)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def nn_send(socket, msg, flags):
    """Send a message."""
    try:
        # Fast path: msg is a ctypes object. `buffer` is the Python 2
        # builtin — NOTE(review): on Python 3 this raises NameError, which
        # the except below does not catch; this module appears to target
        # Python 2. Confirm intended interpreter version.
        return _nn_send(socket, ctypes.addressof(msg), len(buffer(msg)), flags)
    except (TypeError, AttributeError):
        # Fallback: copy a plain byte string into a C buffer.
        buf_msg = ctypes.create_string_buffer(msg)
        return _nn_send(socket, ctypes.addressof(buf_msg), len(msg), flags)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def nn_allocmsg(size, type):
    """Allocate a nanomsg message of *size* bytes.

    :returns: a message object, or None when allocation failed
    """
    pointer = _nn_allocmsg(size, type)
    return None if pointer is None else _create_message(pointer, size)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def nn_recv(socket, *args):
    """Receive a message.

    Call as nn_recv(socket, flags) to let nanomsg allocate the buffer, or
    nn_recv(socket, msg_buf, flags) to receive into a caller-supplied
    writable buffer. Returns (rtn, buffer-or-None).
    """
    if len(args) == 1:
        flags, = args
        pointer = ctypes.c_void_p()
        # c_size_t(-1) is nanomsg's NN_MSG sentinel: "allocate for me".
        rtn = _nn_recv(socket, ctypes.byref(pointer), ctypes.c_size_t(-1),
                       flags)
        if rtn < 0:
            return rtn, None
        else:
            # Wrap the library-allocated buffer; rtn is the received length.
            return rtn, _create_message(pointer.value, rtn)
    elif len(args) == 2:
        msg_buf, flags = args
        mv_buf = memoryview(msg_buf)
        if mv_buf.readonly:
            raise TypeError('Writable buffer is required')
        # NOTE(review): ctypes.addressof requires a ctypes instance — a
        # plain bytearray would raise TypeError here; confirm the expected
        # buffer type at call sites.
        rtn = _nn_recv(socket, ctypes.addressof(msg_buf), len(mv_buf), flags)
        return rtn, msg_buf
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_message_buffer(size, type):
    """Allocate a nanomsg message buffer, raising on failure."""
    buf = wrapper.nn_allocmsg(size, type)
    if buf is None:
        raise NanoMsgAPIError()
    return buf
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bind(self, address):
    """Add a local endpoint to the socket and return its endpoint object."""
    if self.uses_nanoconfig:
        raise ValueError("Nanoconfig address must be sole endpoint")
    eid = _nn_check_positive_rtn(wrapper.nn_bind(self._fd, address))
    endpoint = Socket.BindEndpoint(self, eid, address)
    self._endpoints.append(endpoint)
    return endpoint
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self, address):
    """Add a remote endpoint to the socket and return its endpoint object."""
    if self.uses_nanoconfig:
        raise ValueError("Nanoconfig address must be sole endpoint")
    eid = _nn_check_positive_rtn(wrapper.nn_connect(self.fd, address))
    endpoint = Socket.ConnectEndpoint(self, eid, address)
    self._endpoints.append(endpoint)
    return endpoint
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def configure(self, address):
    """Configure the socket's addresses with nanoconfig."""
    global nanoconfig_started
    # Nanoconfig must manage every endpoint on the socket.
    if len(self._endpoints):
        raise ValueError("Nanoconfig address must be sole endpoint")
    eid = _nn_check_positive_rtn(wrapper.nc_configure(self.fd, address))
    if not nanoconfig_started:
        nanoconfig_started = True
    endpoint = Socket.NanoconfigEndpoint(self, eid, address)
    self._endpoints.append(endpoint)
    return endpoint
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def close(self):
    """Close the socket, releasing its file descriptor."""
    if not self.is_open():
        return
    # Mark the socket closed before handing the fd to the C layer.
    fd = self._fd
    self._fd = -1
    if self.uses_nanoconfig:
        wrapper.nc_close(fd)
    else:
        _nn_check_positive_rtn(wrapper.nn_close(fd))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def recv(self, buf=None, flags=0):
    """Receive a message, optionally into a caller-supplied buffer *buf*."""
    if buf is None:
        rtn, out_buf = wrapper.nn_recv(self.fd, flags)
    else:
        rtn, out_buf = wrapper.nn_recv(self.fd, buf, flags)
    _nn_check_positive_rtn(rtn)
    # NOTE(review): `buffer` is the Python 2 builtin; on Python 3 this
    # line raises NameError — module appears to target Python 2. Confirm.
    return bytes(buffer(out_buf))[:rtn]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def bounter(size_mb=None, need_iteration=True, need_counts=True, log_counting=None):
    """Factory for a bounter counting structure.

    Args:
        size_mb (int): desired memory footprint of the counter.
        need_iteration (Bool): True builds a `HashTable` that can iterate
            over inserted key/value pairs; False builds a `CountMinSketch`,
            which is better in limited memory but cannot iterate.
        need_counts (Bool): False ignores the remaining parameters and
            returns a minimal hyperloglog cardinality counter (~64KB).
        log_counting (int): log counting mode for `CountMinSketch`
            (None = 32-bit, 1024 = 16-bit, 8 = 8-bit). ValueError when set
            together with `need_iteration=True`.
    """
    if not need_counts:
        # Cardinality-only mode: all other parameters are ignored.
        return CardinalityEstimator()
    if size_mb is None:
        raise ValueError("Max size in MB must be provided.")
    if not need_iteration:
        return CountMinSketch(size_mb=size_mb, log_counting=log_counting)
    if log_counting:
        raise ValueError("Log counting is only supported with CMS implementation (need_iteration=False).")
    return HashTable(size_mb=size_mb)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def to_dict(obj, classkey=None):
    """Recursively convert a Python object graph into plain dictionaries.

    :param obj: object to convert (dict, iterable, or anything with __dict__)
    :param classkey: optional key under which each object's class name is
        recorded in its dictionary
    :returns: dict / list / scalar mirror of *obj*
    """
    if isinstance(obj, dict):
        return {k: to_dict(v, classkey) for k, v in obj.items()}
    elif hasattr(obj, "_ast"):
        return to_dict(obj._ast())
    elif hasattr(obj, "__iter__") and not isinstance(obj, str):
        return [to_dict(v, classkey) for v in obj]
    elif hasattr(obj, "__dict__"):
        # __dict__.items() behaves the same on Python 2 and 3, so the old
        # six.PY2 / iteritems() duplication is unnecessary.
        data = {key: to_dict(value, classkey)
                for key, value in obj.__dict__.items()
                if not callable(value) and not key.startswith('_')}
        if classkey is not None and hasattr(obj, "__class__"):
            data[classkey] = obj.__class__.__name__
        return data
    else:
        return obj
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def json_filter(self):
    """Return a callable mapping an object to its public, non-None attributes.

    Attributes whose names start with '_' or whose values are None are
    dropped from the resulting dictionary.
    """
    def _filter(obj):
        return {k: v for k, v in obj.__dict__.items()
                if not k.startswith('_') and getattr(obj, k) is not None}
    return _filter
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_current_user(self):
    """Fetch data from the current-user endpoint."""
    return self.get(self.current_user_url)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_report(self, report_type, qs=None):
    """Fetch report data of *report_type* for this company.

    :param report_type: report name appended to the endpoint path
    :param qs: optional dict of query-string parameters
    """
    params = {} if qs is None else qs
    endpoint = "{0}/company/{1}/reports/{2}".format(
        self.api_url, self.company_id, report_type)
    return self.get(endpoint, params=params)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _meters_per_pixel(zoom, lat=0.0, tilesize=256):
    """Pixel resolution (meters/pixel) of a mercator tile.

    Parameters
    ----------
    zoom: int
        Mercator zoom level.
    lat: float, optional
        Latitude in decimal degrees (default: 0).
    tilesize: int, optional
        Mercator tile size (default: 256).

    Returns
    -------
    Pixel resolution in meters.
    """
    pixels_at_zoom = tilesize * 2 ** zoom
    # Earth circumference (WGS84 semi-major axis) scaled by latitude.
    return (math.cos(lat * math.pi / 180.0) * 2 * math.pi * 6378137) / pixels_at_zoom
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def zoom_for_pixelsize(pixel_size, max_z=24, tilesize=256):
    """Mercator zoom level matching a given pixel resolution.

    Parameters
    ----------
    pixel_size: float
        Pixel size.
    max_z: int, optional
        Max mercator zoom level allowed (default: 24).
    tilesize: int, optional
        Mercator tile size (default: 256).

    Returns
    -------
    Mercator zoom level corresponding to the pixel resolution.
    """
    for zoom in range(max_z):
        if pixel_size > _meters_per_pixel(zoom, 0, tilesize=tilesize):
            # First level that over-resolves: step back one. We don't want
            # to scale up, and never go below zoom 0.
            return max(0, zoom - 1)
    return max_z - 1
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def metadata(address, pmin=2, pmax=98, **kwargs):
    """Return image bounds and band statistics.

    Attributes
    ----------
    address : str or PathLike
        Dataset path or URL, opened in "r" mode.
    pmin : int, optional
        Histogram minimum cut (default: 2).
    pmax : int, optional
        Histogram maximum cut (default: 98).
    kwargs : optional
        Passed through to 'rio_tiler.utils.raster_get_stats'.

    Returns
    -------
    dict with image bounds and band statistics.
    """
    info = dict(address=address)
    stats = utils.raster_get_stats(address, percentiles=(pmin, pmax), **kwargs)
    info.update(stats)
    return info
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def tile(address, tile_x, tile_y, tile_z, tilesize=256, **kwargs):
    """Create a mercator tile from any image.

    Attributes
    ----------
    address : str
        File url.
    tile_x, tile_y, tile_z : int
        Mercator tile X/Y indexes and zoom level.
    tilesize : int, optional
        Output image size (default: 256).
    kwargs: dict, optional
        Passed to the 'rio_tiler.utils._tile_read' function.

    Returns
    -------
    data : numpy ndarray
    mask : numpy array
    """
    with rasterio.open(address) as src:
        # Dataset bounds reprojected to WGS84, densified along the edges.
        wgs_bounds = transform_bounds(
            *[src.crs, "epsg:4326"] + list(src.bounds), densify_pts=21
        )
        if not utils.tile_exists(wgs_bounds, tile_z, tile_x, tile_y):
            raise TileOutsideBounds(
                "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y)
            )
        # Mercator (epsg:3857) bounds of the requested tile.
        mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z)
        tile_bounds = mercantile.xy_bounds(mercator_tile)
        return utils.tile_read(src, tile_bounds, tilesize, **kwargs)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _stats(arr, percentiles=(2, 98), **kwargs):
    """Calculate statistics over the valid (unmasked) values of *arr*.

    Attributes
    ----------
    arr: numpy masked ndarray
        Input array data to get the stats from.
    percentiles: tuple, optional
        Min/Max percentiles to compute (default: (2, 98)).
    kwargs: dict, optional
        Passed to the numpy.histogram function (e.g. bins, range).

    Returns
    -------
    dict with keys 'pc', 'min', 'max', 'std', 'histogram'.
    """
    # compressed() returns the unmasked data as a plain 1D array and,
    # unlike arr[~arr.mask], also behaves when mask is the scalar nomask.
    valid = arr.compressed()
    sample, edges = np.histogram(valid, **kwargs)
    return {
        "pc": np.percentile(valid, percentiles).astype(arr.dtype).tolist(),
        "min": arr.min().item(),
        "max": arr.max().item(),
        "std": arr.std().item(),
        "histogram": [sample.tolist(), edges.tolist()],
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def raster_get_stats(
    src_path,
    indexes=None,
    nodata=None,
    overview_level=None,
    max_size=1024,
    percentiles=(2, 98),
    dst_crs=CRS({"init": "EPSG:4326"}),
    histogram_bins=10,
    histogram_range=None,
):
    """Retrieve dataset statistics.

    Returns bounds, mercator zoom range, band descriptions and per-band
    statistics (percentiles, min, max, stdev, histogram) for *src_path*.

    Note: the ``dst_crs`` default is evaluated once at import time and the
    same CRS instance is shared across calls.
    """
    # Normalize indexes to a list; None means "all bands" (set below).
    if isinstance(indexes, int):
        indexes = [indexes]
    elif isinstance(indexes, tuple):
        indexes = list(indexes)
    with rasterio.open(src_path) as src_dst:
        levels = src_dst.overviews(1)
        width = src_dst.width
        height = src_dst.height
        indexes = indexes if indexes else src_dst.indexes
        # Explicit nodata argument wins over the dataset's own value.
        nodata = nodata if nodata is not None else src_dst.nodata
        bounds = transform_bounds(
            *[src_dst.crs, dst_crs] + list(src_dst.bounds), densify_pts=21
        )
        minzoom, maxzoom = get_zooms(src_dst)

        def _get_descr(ix):
            """Return band description."""
            name = src_dst.descriptions[ix - 1]
            if not name:
                name = "band{}".format(ix)
            return name

        band_descriptions = [(ix, _get_descr(ix)) for ix in indexes]
        if len(levels):
            if overview_level:
                decim = levels[overview_level]
            else:
                # determine which zoom level to read: first overview whose
                # decimated size fits under max_size.
                for ii, decim in enumerate(levels):
                    if max(width // decim, height // decim) < max_size:
                        break
        else:
            # No overviews: read at full resolution and warn.
            decim = 1
            warnings.warn(
                "Dataset has no overviews, reading the full dataset", NoOverviewWarning
            )
        out_shape = (len(indexes), height // decim, width // decim)
        vrt_params = dict(add_alpha=True, resampling=Resampling.bilinear)
        if has_alpha_band(src_dst):
            # Dataset already carries alpha; don't add another band.
            vrt_params.update(dict(add_alpha=False))
        if nodata is not None:
            vrt_params.update(dict(nodata=nodata, add_alpha=False, src_nodata=nodata))
        with WarpedVRT(src_dst, **vrt_params) as vrt:
            arr = vrt.read(out_shape=out_shape, indexes=indexes, masked=True)
            params = {}
            if histogram_bins:
                params.update(dict(bins=histogram_bins))
            if histogram_range:
                params.update(dict(range=histogram_range))
            # Per-band stats, skipping any alpha band.
            stats = {
                indexes[b]: _stats(arr[b], percentiles=percentiles, **params)
                for b in range(arr.shape[0])
                if vrt.colorinterp[b] != ColorInterp.alpha
            }
    return {
        "bounds": {
            "value": bounds,
            "crs": dst_crs.to_string() if isinstance(dst_crs, CRS) else dst_crs,
        },
        "minzoom": minzoom,
        "maxzoom": maxzoom,
        "band_descriptions": band_descriptions,
        "statistics": stats,
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_vrt_transform(src_dst, bounds, bounds_crs="epsg:3857"):
    """Calculate the VRT transform and dimensions covering *bounds*.

    Attributes
    ----------
    src_dst : rasterio.io.DatasetReader
        Source dataset.
    bounds : list
        Bounds (left, bottom, right, top).
    bounds_crs : str
        Coordinate reference system string (default "epsg:3857").

    Returns
    -------
    vrt_transform: Affine
        Output affine transformation matrix.
    vrt_width, vrt_height: int
        Output dimensions.
    """
    dst_transform, _, _ = calculate_default_transform(
        src_dst.crs, bounds_crs, src_dst.width, src_dst.height, *src_dst.bounds
    )
    west, south, east, north = bounds
    # transform.a is the (positive) x pixel size and transform.e the
    # (negative) y pixel size, so both divisions yield positive extents.
    vrt_width = math.ceil((east - west) / dst_transform.a)
    vrt_height = math.ceil((south - north) / dst_transform.e)
    vrt_transform = transform.from_bounds(west, south, east, north, vrt_width, vrt_height)
    return vrt_transform, vrt_width, vrt_height
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def has_alpha_band(src_dst):
    """Return True if the dataset has an alpha mask flag or alpha band."""
    mask_has_alpha = any(
        MaskFlags.alpha in flags for flags in src_dst.mask_flag_enums
    )
    return mask_has_alpha or ColorInterp.alpha in src_dst.colorinterp
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def linear_rescale(image, in_range=(0, 1), out_range=(1, 255)):
    """Linearly rescale *image* from *in_range* to *out_range*.

    Attributes
    ----------
    image : numpy ndarray
        Image array to rescale.
    in_range : tuple, optional (default: (0, 1))
        Input (min, max) values to clip and rescale from.
    out_range : tuple, optional (default: (1, 255))
        Output (min, max) bounds to rescale to.

    Returns
    -------
    numpy ndarray with rescaled values.
    """
    imin, imax = in_range
    omin, omax = out_range
    image = np.clip(image, imin, imax) - imin
    # np.float was removed in NumPy 1.20; the builtin float is equivalent.
    image = image / float(imax - imin)
    return image * (omax - omin) + omin
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def tile_exists(bounds, tile_z, tile_x, tile_y):
    """Check whether mercator tile z-x-y intersects the WGS84 *bounds*.

    Attributes
    ----------
    bounds : list
        WGS84 bounds (left, bottom, right, top).
    tile_z, tile_x, tile_y : int
        Mercator zoom level and tile X/Y indexes.

    Returns
    -------
    True when the z-x-y mercator tile is inside the bounds.
    """
    # Tiles covering the top-left and bottom-right corners of the bounds.
    mintile = mercantile.tile(bounds[0], bounds[3], tile_z)
    maxtile = mercantile.tile(bounds[2], bounds[1], tile_z)
    within_x = mintile.x <= tile_x <= maxtile.x + 1
    within_y = mintile.y <= tile_y <= maxtile.y + 1
    return within_x and within_y
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _apply_discrete_colormap(arr, cmap):
    """Map each pixel value of the 1-band array *arr* to an RGB triplet.

    Attributes
    ----------
    arr : numpy.ndarray
        (1, h, w) image array to convert.
    cmap : dict
        Discrete colormap, e.g. {1: [255, 255, 255], 2: [255, 0, 0]}.

    Returns
    -------
    numpy.ndarray of shape (3, h, w), dtype uint8.
    """
    height, width = arr.shape[1], arr.shape[2]
    rgb = np.zeros((height, width, 3), dtype=np.uint8)
    for value, color in cmap.items():
        rgb[arr[0] == value] = color
    # (h, w, 3) -> (3, h, w) band-first layout.
    return np.transpose(rgb, [2, 0, 1])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def array_to_image(arr, mask=None, img_format="png", color_map=None, **creation_options):
    """Translate a numpy ndarray to an image buffer using GDAL.

    Attributes
    ----------
    arr : numpy ndarray
        Image array to encode.
    mask : numpy ndarray, optional
        Mask array, written as an extra alpha band (except for JPEG).
    img_format : str, optional
        Image format to return (default: 'png').
    color_map : numpy.ndarray or dict, optional
        Either a (256, 3) RGB lookup array or a dict mapping discrete
        pixel values to RGB triplets.
    creation_options : dict, optional
        Image driver creation options passed to GDAL.

    Returns
    -------
    bytes
    """
    img_format = img_format.lower()
    # Promote single-band 2D input to the (bands, h, w) layout.
    if len(arr.shape) < 3:
        arr = np.expand_dims(arr, axis=0)
    if color_map is not None and isinstance(color_map, dict):
        # Discrete value -> RGB mapping.
        arr = _apply_discrete_colormap(arr, color_map)
    elif color_map is not None:
        # (256, 3) lookup-table colormap applied to the first band.
        arr = np.transpose(color_map[arr][0], [2, 0, 1]).astype(np.uint8)
    # WEBP doesn't support 1band dataset so we must hack to create a RGB dataset
    if img_format == "webp" and arr.shape[0] == 1:
        arr = np.repeat(arr, 3, axis=0)
    # JPEG cannot carry an alpha channel; otherwise reserve a band for the mask.
    if mask is not None and img_format != "jpeg":
        nbands = arr.shape[0] + 1
    else:
        nbands = arr.shape[0]
    output_profile = dict(
        driver=img_format,
        dtype=arr.dtype,
        count=nbands,
        height=arr.shape[1],
        width=arr.shape[2],
    )
    output_profile.update(creation_options)
    with MemoryFile() as memfile:
        with memfile.open(**output_profile) as dst:
            dst.write(arr, indexes=list(range(1, arr.shape[0] + 1)))
            # Use Mask as an alpha band
            if mask is not None and img_format != "jpeg":
                dst.write(mask.astype(arr.dtype), indexes=nbands)
        # Read back after the dataset is closed so all bands are flushed.
        return memfile.read()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_colormap(name="cfastie", format="pil"):
    """Return a Pillow- or GDAL-compatible colormap.

    Attributes
    ----------
    name : str, optional
        Colormap name (default: cfastie).
    format : str, optional
        Compatibility library, "pil" or "gdal" (default: pil).

    Returns
    -------
    Flat palette list (Pillow putpalette format) or an (n, 3) numpy array
    (GDAL format).
    """
    cmap_file = os.path.join(os.path.dirname(__file__), "cmap", "{0}.txt".format(name))
    with open(cmap_file) as cmap:
        lines = cmap.read().splitlines()
    # Drop '#' comment lines and the header row; each line is "R G B".
    colormap = [
        list(map(int, line.split())) for line in lines if not line.startswith("#")
    ][1:]
    cmap = list(np.array(colormap).flatten())
    fmt = format.lower()
    if fmt == "pil":
        return cmap
    if fmt == "gdal":
        return np.array(list(_chunks(cmap, 3)))
    raise Exception("Unsupported {} colormap format".format(format))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mapzen_elevation_rgb(arr):
    """Encode elevation values into Mapzen-tangram-compatible RGB channels.

    Attributes
    ----------
    arr : numpy ndarray
        Elevation array to encode.

    Returns
    -------
    numpy ndarray, uint8 RGB array of shape (3, h, w).
    """
    # Shift by 32768 so negative elevations fit the unsigned range.
    shifted = np.clip(arr + 32768.0, 0.0, 65535.0)
    red = shifted / 256
    green = shifted % 256
    blue = (shifted * 256) % 256
    return np.stack([red, green, blue]).astype(np.uint8)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def expression(sceneid, tile_x, tile_y, tile_z, expr=None, **kwargs):
    """Apply a band-arithmetic expression to tile data.

    Attributes
    ----------
    sceneid : str
        Landsat id, Sentinel id, CBERS id or file url.
    tile_x, tile_y, tile_z : int
        Mercator tile X/Y indexes and zoom level.
    expr : str, required
        Expression to apply, with comma-separated sub-expressions producing
        one output band each.

    Returns
    -------
    (ndarray, mask) tuple of processed pixel values.
    """
    if not expr:
        raise Exception("Missing expression")
    # NOTE(review): this regex matches lowercase band refs ('b5'), while the
    # original docstring showed 'B5' — confirm the expected expression case.
    # set() deduplicates refs; its (arbitrary) order is reused consistently
    # for both the tile fetch and the ctx binding below.
    bands_names = tuple(set(re.findall(r"b(?P<bands>[0-9A]{1,2})", expr)))
    rgb = expr.split(",")
    # Dispatch to the sensor-specific tiler based on the scene id prefix.
    if sceneid.startswith("L"):
        from rio_tiler.landsat8 import tile as l8_tile
        arr, mask = l8_tile(
            sceneid, tile_x, tile_y, tile_z, bands=bands_names, **kwargs
        )
    elif sceneid.startswith("S2"):
        from rio_tiler.sentinel2 import tile as s2_tile
        arr, mask = s2_tile(
            sceneid, tile_x, tile_y, tile_z, bands=bands_names, **kwargs
        )
    elif sceneid.startswith("CBERS"):
        from rio_tiler.cbers import tile as cbers_tile
        arr, mask = cbers_tile(
            sceneid, tile_x, tile_y, tile_z, bands=bands_names, **kwargs
        )
    else:
        from rio_tiler.main import tile as main_tile
        bands = tuple(map(int, bands_names))
        arr, mask = main_tile(sceneid, tile_x, tile_y, tile_z, indexes=bands, **kwargs)
    # Bind each fetched band to its 'b<name>' variable for numexpr.
    ctx = {}
    for bdx, b in enumerate(bands_names):
        ctx["b{}".format(b)] = arr[bdx]
    return (
        np.array(
            [np.nan_to_num(ne.evaluate(bloc.strip(), local_dict=ctx)) for bloc in rgb]
        ),
        mask,
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _sentinel_parse_scene_id(sceneid):
"""Parse Sentinel-2 scene id. Attributes sceneid : str Sentinel-2 sceneid. Returns ------- out : dict dictionary with metadata constructed from the sceneid. e.g: _sentinel_parse_scene_id('S2A_tile_20170323_07SNC_0') { "acquisitionDay": "23", "acquisitionMonth": "03", "acquisitionYear": "2017", "key": "tiles/7/S/NC/2017/3/23/0", "lat": "S", "num": "0", "satellite": "A", "scene": "S2A_tile_20170323_07SNC_0", "sensor": "2", "sq": "NC", "utm": "07", } """ |
if not re.match("^S2[AB]_tile_[0-9]{8}_[0-9]{2}[A-Z]{3}_[0-9]$", sceneid):
raise InvalidSentinelSceneId("Could not match {}".format(sceneid))
sentinel_pattern = (
r"^S"
r"(?P<sensor>\w{1})"
r"(?P<satellite>[AB]{1})"
r"_tile_"
r"(?P<acquisitionYear>[0-9]{4})"
r"(?P<acquisitionMonth>[0-9]{2})"
r"(?P<acquisitionDay>[0-9]{2})"
r"_"
r"(?P<utm>[0-9]{2})"
r"(?P<lat>\w{1})"
r"(?P<sq>\w{2})"
r"_"
r"(?P<num>[0-9]{1})$"
)
meta = None
match = re.match(sentinel_pattern, sceneid, re.IGNORECASE)
if match:
meta = match.groupdict()
utm_zone = meta["utm"].lstrip("0")
grid_square = meta["sq"]
latitude_band = meta["lat"]
year = meta["acquisitionYear"]
month = meta["acquisitionMonth"].lstrip("0")
day = meta["acquisitionDay"].lstrip("0")
img_num = meta["num"]
meta["key"] = "tiles/{}/{}/{}/{}/{}/{}/{}".format(
utm_zone, latitude_band, grid_square, year, month, day, img_num
)
meta["scene"] = sceneid
return meta |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def tile(sceneid, tile_x, tile_y, tile_z, bands=("04", "03", "02"), tilesize=256):
""" Create mercator tile from Sentinel-2 data. Attributes sceneid : str Sentinel-2 sceneid. tile_x : int Mercator tile X index. tile_y : int Mercator tile Y index. tile_z : int Mercator tile ZOOM level. bands : tuple, str, optional (default: ('04', '03', '02')) Bands index for the RGB combination. tilesize : int, optional (default: 256) Output image size. Returns ------- data : numpy ndarray mask: numpy array """ |
# Normalize a single band name into a tuple.
if not isinstance(bands, tuple):
    bands = tuple((bands,))
# Validate every requested band before touching the network.
for band in bands:
    if band not in SENTINEL_BANDS:
        raise InvalidBandName("{} is not a valid Sentinel band name".format(band))
scene_params = _sentinel_parse_scene_id(sceneid)
sentinel_address = "{}/{}".format(SENTINEL_BUCKET, scene_params["key"])
# The low-resolution preview is opened only to get the scene footprint.
sentinel_preview = "{}/preview.jp2".format(sentinel_address)
with rasterio.open(sentinel_preview) as src:
    wgs_bounds = transform_bounds(
        *[src.crs, "epsg:4326"] + list(src.bounds), densify_pts=21
    )
# Reject tiles that do not intersect the scene footprint.
if not utils.tile_exists(wgs_bounds, tile_z, tile_x, tile_y):
    raise TileOutsideBounds(
        "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y)
    )
mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z)
tile_bounds = mercantile.xy_bounds(mercator_tile)
addresses = ["{}/B{}.jp2".format(sentinel_address, band) for band in bands]
# Read all requested bands concurrently; nodata=0 per Sentinel-2 convention.
_tiler = partial(utils.tile_read, bounds=tile_bounds, tilesize=tilesize, nodata=0)
with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
    data, masks = zip(*list(executor.map(_tiler, addresses)))
    # A pixel is valid only where every band is valid (255 = valid).
    mask = np.all(masks, axis=0).astype(np.uint8) * 255
return np.concatenate(data), mask
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _landsat_get_mtl(sceneid):
    """Fetch and parse the Landsat-8 MTL metadata for *sceneid*.

    For scenes after May 2017, sceneid has to be a LANDSAT_PRODUCT_ID.
    Returns a JSON-like dict with the metadata.
    """
    scene_params = _landsat_parse_scene_id(sceneid)
    meta_file = "http://landsat-pds.s3.amazonaws.com/{}_MTL.txt".format(
        scene_params["key"]
    )
    raw_metadata = str(urlopen(meta_file).read().decode())
    return toa_utils._parse_mtl_txt(raw_metadata)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _landsat_parse_scene_id(sceneid):
""" Parse Landsat-8 scene id. Author @perrygeo - http://www.perrygeo.com """ |
pre_collection = r"(L[COTEM]8\d{6}\d{7}[A-Z]{3}\d{2})"
collection_1 = r"(L[COTEM]08_L\d{1}[A-Z]{2}_\d{6}_\d{8}_\d{8}_\d{2}_(T1|T2|RT))"
if not re.match("^{}|{}$".format(pre_collection, collection_1), sceneid):
raise InvalidLandsatSceneId("Could not match {}".format(sceneid))
precollection_pattern = (
r"^L"
r"(?P<sensor>\w{1})"
r"(?P<satellite>\w{1})"
r"(?P<path>[0-9]{3})"
r"(?P<row>[0-9]{3})"
r"(?P<acquisitionYear>[0-9]{4})"
r"(?P<acquisitionJulianDay>[0-9]{3})"
r"(?P<groundStationIdentifier>\w{3})"
r"(?P<archiveVersion>[0-9]{2})$"
)
collection_pattern = (
r"^L"
r"(?P<sensor>\w{1})"
r"(?P<satellite>\w{2})"
r"_"
r"(?P<processingCorrectionLevel>\w{4})"
r"_"
r"(?P<path>[0-9]{3})"
r"(?P<row>[0-9]{3})"
r"_"
r"(?P<acquisitionYear>[0-9]{4})"
r"(?P<acquisitionMonth>[0-9]{2})"
r"(?P<acquisitionDay>[0-9]{2})"
r"_"
r"(?P<processingYear>[0-9]{4})"
r"(?P<processingMonth>[0-9]{2})"
r"(?P<processingDay>[0-9]{2})"
r"_"
r"(?P<collectionNumber>\w{2})"
r"_"
r"(?P<collectionCategory>\w{2})$"
)
meta = None
for pattern in [collection_pattern, precollection_pattern]:
match = re.match(pattern, sceneid, re.IGNORECASE)
if match:
meta = match.groupdict()
break
if meta.get("acquisitionJulianDay"):
date = datetime.datetime(
int(meta["acquisitionYear"]), 1, 1
) + datetime.timedelta(int(meta["acquisitionJulianDay"]) - 1)
meta["date"] = date.strftime("%Y-%m-%d")
else:
meta["date"] = "{}-{}-{}".format(
meta["acquisitionYear"], meta["acquisitionMonth"], meta["acquisitionDay"]
)
collection = meta.get("collectionNumber", "")
if collection != "":
collection = "c{}".format(int(collection))
meta["key"] = os.path.join(
collection, "L8", meta["path"], meta["row"], sceneid, sceneid
)
meta["scene"] = sceneid
return meta |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _landsat_stats( band, address_prefix, metadata, overview_level=None, max_size=1024, percentiles=(2, 98), dst_crs=CRS({"init": "EPSG:4326"}), histogram_bins=10, histogram_range=None, ):
""" Retrieve landsat dataset statistics. Attributes band : str Landsat band number address_prefix : str A Landsat AWS S3 dataset prefix. metadata : dict Landsat metadata overview_level : int, optional Overview (decimation) level to fetch. max_size: int, optional Maximum size of dataset to retrieve (will be used to calculate the overview level to fetch). percentiles : tulple, optional Percentile or sequence of percentiles to compute, which must be between 0 and 100 inclusive (default: (2, 98)). dst_crs: CRS or dict Target coordinate reference system (default: EPSG:4326). histogram_bins: int, optional Defines the number of equal-width histogram bins (default: 10). histogram_range: tuple or list, optional The lower and upper range of the bins. If not provided, range is simply the min and max of the array. Returns ------- out : dict (percentiles), min, max, stdev, histogram for each band, e.g. { "4": { 'pc': [15, 121], 'min': 1, 'max': 162, 'std': 27.22067722127997, 'histogram': [ [102934, 135489, 20981, 13548, 11406, 8799, 7351, 5622, 2985, 662] [1., 17.1, 33.2, 49.3, 65.4, 81.5, 97.6, 113.7, 129.8, 145.9, 162.] ] } } """ |
# Build the S3 object path for the requested band.
src_path = "{}_B{}.TIF".format(address_prefix, band)
with rasterio.open(src_path) as src:
    levels = src.overviews(1)
    width = src.width
    height = src.height
    bounds = transform_bounds(
        *[src.crs, dst_crs] + list(src.bounds), densify_pts=21
    )
    if len(levels):
        # NOTE(review): `if overview_level:` treats level 0 as falsy, so
        # overview_level=0 falls through to auto-selection — confirm intended.
        if overview_level:
            decim = levels[overview_level]
        else:
            # determine which zoom level to read
            for ii, decim in enumerate(levels):
                if max(width // decim, height // decim) < max_size:
                    break
    else:
        decim = 1
        warnings.warn(
            "Dataset has no overviews, reading the full dataset", NoOverviewWarning
        )
    out_shape = (height // decim, width // decim)
    # 0 is the nodata value in Landsat-8 COGs; mask it during the read.
    vrt_params = dict(
        nodata=0, add_alpha=False, src_nodata=0, init_dest_nodata=False
    )
    with WarpedVRT(src, **vrt_params) as vrt:
        arr = vrt.read(out_shape=out_shape, indexes=[1], masked=True)
if band in ["10", "11"]:  # TIRS
    # Thermal bands: convert DN to at-sensor brightness temperature.
    multi_rad = metadata["RADIOMETRIC_RESCALING"].get(
        "RADIANCE_MULT_BAND_{}".format(band)
    )
    add_rad = metadata["RADIOMETRIC_RESCALING"].get(
        "RADIANCE_ADD_BAND_{}".format(band)
    )
    k1 = metadata["TIRS_THERMAL_CONSTANTS"].get("K1_CONSTANT_BAND_{}".format(band))
    k2 = metadata["TIRS_THERMAL_CONSTANTS"].get("K2_CONSTANT_BAND_{}".format(band))
    arr = brightness_temp.brightness_temp(arr, multi_rad, add_rad, k1, k2)
else:
    # Optical bands: convert DN to TOA reflectance, scaled by 10000.
    multi_reflect = metadata["RADIOMETRIC_RESCALING"].get(
        "REFLECTANCE_MULT_BAND_{}".format(band)
    )
    add_reflect = metadata["RADIOMETRIC_RESCALING"].get(
        "REFLECTANCE_ADD_BAND_{}".format(band)
    )
    sun_elev = metadata["IMAGE_ATTRIBUTES"]["SUN_ELEVATION"]
    arr = 10000 * reflectance.reflectance(
        arr, multi_reflect, add_reflect, sun_elev, src_nodata=0
    )
# Optional histogram tuning is forwarded to the stats helper.
params = {}
if histogram_bins:
    params.update(dict(bins=histogram_bins))
if histogram_range:
    params.update(dict(range=histogram_range))
stats = {band: utils._stats(arr, percentiles=percentiles, **params)}
return {
    "bounds": {
        "value": bounds,
        "crs": dst_crs.to_string() if isinstance(dst_crs, CRS) else dst_crs,
    },
    "statistics": stats,
}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def tile( sceneid, tile_x, tile_y, tile_z, bands=("4", "3", "2"), tilesize=256, pan=False ):
""" Create mercator tile from Landsat-8 data. Attributes sceneid : str Landsat sceneid. For scenes after May 2017, sceneid have to be LANDSAT_PRODUCT_ID. tile_x : int Mercator tile X index. tile_y : int Mercator tile Y index. tile_z : int Mercator tile ZOOM level. bands : tuple, str, optional (default: ("4", "3", "2")) Bands index for the RGB combination. tilesize : int, optional (default: 256) Output image size. pan : boolean, optional (default: False) If True, apply pan-sharpening. Returns ------- data : numpy ndarray mask: numpy array """ |
# Normalize a single band name into a tuple.
if not isinstance(bands, tuple):
    bands = tuple((bands,))
# Validate every requested band before touching the network.
for band in bands:
    if band not in LANDSAT_BANDS:
        raise InvalidBandName("{} is not a valid Landsat band name".format(band))
scene_params = _landsat_parse_scene_id(sceneid)
meta_data = _landsat_get_mtl(sceneid).get("L1_METADATA_FILE")
landsat_address = "{}/{}".format(LANDSAT_BUCKET, scene_params["key"])
# Scene footprint comes from the MTL metadata, not from opening a raster.
wgs_bounds = toa_utils._get_bounds_from_metadata(meta_data["PRODUCT_METADATA"])
if not utils.tile_exists(wgs_bounds, tile_z, tile_x, tile_y):
    raise TileOutsideBounds(
        "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y)
    )
mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z)
tile_bounds = mercantile.xy_bounds(mercator_tile)
addresses = ["{}_B{}.TIF".format(landsat_address, band) for band in bands]
# Read all requested bands concurrently; nodata=0 per Landsat-8 convention.
_tiler = partial(utils.tile_read, bounds=tile_bounds, tilesize=tilesize, nodata=0)
with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
    data, masks = zip(*list(executor.map(_tiler, addresses)))
    data = np.concatenate(data)
    # A pixel is valid only where every band is valid (255 = valid).
    mask = np.all(masks, axis=0).astype(np.uint8) * 255
    if pan:
        # Pan-sharpen against band 8 (the 15m panchromatic band); its
        # mask replaces the multispectral mask.
        pan_address = "{}_B8.TIF".format(landsat_address)
        matrix_pan, mask = utils.tile_read(
            pan_address, tile_bounds, tilesize, nodata=0
        )
        data = utils.pansharpening_brovey(data, matrix_pan, 0.2, matrix_pan.dtype)
    sun_elev = meta_data["IMAGE_ATTRIBUTES"]["SUN_ELEVATION"]
    # Convert each band from DN to physical values in place.
    for bdx, band in enumerate(bands):
        if int(band) > 9:  # TIRS
            # Thermal bands -> at-sensor brightness temperature.
            multi_rad = meta_data["RADIOMETRIC_RESCALING"].get(
                "RADIANCE_MULT_BAND_{}".format(band)
            )
            add_rad = meta_data["RADIOMETRIC_RESCALING"].get(
                "RADIANCE_ADD_BAND_{}".format(band)
            )
            k1 = meta_data["TIRS_THERMAL_CONSTANTS"].get(
                "K1_CONSTANT_BAND_{}".format(band)
            )
            k2 = meta_data["TIRS_THERMAL_CONSTANTS"].get(
                "K2_CONSTANT_BAND_{}".format(band)
            )
            data[bdx] = brightness_temp.brightness_temp(
                data[bdx], multi_rad, add_rad, k1, k2
            )
        else:
            # Optical bands -> TOA reflectance scaled by 10000.
            multi_reflect = meta_data["RADIOMETRIC_RESCALING"].get(
                "REFLECTANCE_MULT_BAND_{}".format(band)
            )
            add_reflect = meta_data["RADIOMETRIC_RESCALING"].get(
                "REFLECTANCE_ADD_BAND_{}".format(band)
            )
            data[bdx] = 10000 * reflectance.reflectance(
                data[bdx], multi_reflect, add_reflect, sun_elev
            )
    return data, mask
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _cbers_parse_scene_id(sceneid):
"""Parse CBERS scene id. Attributes sceneid : str CBERS sceneid. Returns ------- out : dict dictionary with metadata constructed from the sceneid. e.g: _cbers_parse_scene_id('CBERS_4_PAN5M_20171121_057_094_L2') { "acquisitionDay": "21", "acquisitionMonth": "11", "acquisitionYear": "2017", "instrument": "PAN5M", "key": "CBERS4/PAN5M/057/094/CBERS_4_PAN5M_20171121_057_094_L2", "path": "057", "processingCorrectionLevel": "L2", "row": "094", "mission": "4", "scene": "CBERS_4_PAN5M_20171121_057_094_L2", "reference_band": "1", "bands": ["1"], "rgb": ("1", "1", "1"), "satellite": "CBERS", } """ |
if not re.match(r"^CBERS_4_\w+_[0-9]{8}_[0-9]{3}_[0-9]{3}_L[0-9]$", sceneid):
raise InvalidCBERSSceneId("Could not match {}".format(sceneid))
cbers_pattern = (
r"(?P<satellite>\w+)_"
r"(?P<mission>[0-9]{1})"
r"_"
r"(?P<instrument>\w+)"
r"_"
r"(?P<acquisitionYear>[0-9]{4})"
r"(?P<acquisitionMonth>[0-9]{2})"
r"(?P<acquisitionDay>[0-9]{2})"
r"_"
r"(?P<path>[0-9]{3})"
r"_"
r"(?P<row>[0-9]{3})"
r"_"
r"(?P<processingCorrectionLevel>L[0-9]{1})$"
)
meta = None
match = re.match(cbers_pattern, sceneid, re.IGNORECASE)
if match:
meta = match.groupdict()
path = meta["path"]
row = meta["row"]
instrument = meta["instrument"]
meta["key"] = "CBERS4/{}/{}/{}/{}".format(instrument, path, row, sceneid)
meta["scene"] = sceneid
instrument_params = {
"MUX": {
"reference_band": "6",
"bands": ["5", "6", "7", "8"],
"rgb": ("7", "6", "5"),
},
"AWFI": {
"reference_band": "14",
"bands": ["13", "14", "15", "16"],
"rgb": ("15", "14", "13"),
},
"PAN10M": {
"reference_band": "4",
"bands": ["2", "3", "4"],
"rgb": ("3", "4", "2"),
},
"PAN5M": {"reference_band": "1", "bands": ["1"], "rgb": ("1", "1", "1")},
}
meta["reference_band"] = instrument_params[instrument]["reference_band"]
meta["bands"] = instrument_params[instrument]["bands"]
meta["rgb"] = instrument_params[instrument]["rgb"]
return meta |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def metadata(sceneid, pmin=2, pmax=98, **kwargs):
    """Return band bounds and statistics for a CBERS scene.

    Parameters
    ----------
    sceneid : str
        CBERS sceneid.
    pmin : int, optional (default: 2)
        Histogram minimum cut.
    pmax : int, optional (default: 98)
        Histogram maximum cut.
    kwargs : optional
        Forwarded to 'rio_tiler.utils.raster_get_stats'
        (e.g. histogram_bins=20, dst_crs='epsg:4326').

    Returns
    -------
    dict
        Scene id, bounds (taken from the instrument's reference band)
        and per-band statistics.
    """
    scene = _cbers_parse_scene_id(sceneid)
    prefix = "{}/{}".format(CBERS_BUCKET, scene["key"])
    band_ids = scene["bands"]

    addresses = [
        "{}/{}_BAND{}.tif".format(prefix, sceneid, band_id) for band_id in band_ids
    ]
    stats_worker = partial(
        utils.raster_get_stats,
        indexes=[1],
        nodata=0,
        overview_level=2,
        percentiles=(pmin, pmax),
        **kwargs
    )
    # Fetch stats for all bands concurrently.
    with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor:
        results = list(executor.map(stats_worker, addresses))

    info = {"sceneid": sceneid}
    # Bounds come from the reference band only.
    per_band = dict(zip(band_ids, results))
    info["bounds"] = per_band[scene["reference_band"]]["bounds"]
    # Merge every band's statistics into one mapping keyed by band id.
    info["statistics"] = {
        b: v for b, d in zip(band_ids, results) for k, v in d["statistics"].items()
    }
    return info
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def __datasets_desc():
    """Return a DataFrame of available datasets with their descriptions.

    Columns: 'dataset_id' (dataset key) and 'title'.
    """
    catalog_path = __get_data_folder_path() + 'datasets.csv'
    catalog = pd.read_csv(catalog_path)[['Item', 'Title']]
    catalog.columns = ['dataset_id', 'title']
    return catalog
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dumb_property_dict(style):
    """Return a dict of CSS attributes parsed from an inline style string.

    Splits *style* on ';' into declarations, then each declaration on the
    first ':' into (property, value); declarations without ':' are skipped.
    Whitespace around names and values is stripped.
    """
    # Was: dict() over nested list comprehensions with a stray trailing
    # semicolon — a dict comprehension is the idiomatic form.
    return {
        name.strip(): value.strip()
        for name, value in (
            decl.split(':', 1) for decl in style.split(';') if ':' in decl
        )
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dumb_css_parser(data):
    """Return a dict of CSS selectors, each mapped to a dict of attributes."""
    data += ';'
    # Strip every @import sentence (up to and including its ';').
    idx = data.find('@import')
    while idx != -1:
        end = data.find(';', idx)
        data = data[:idx] + data[end + 1:]
        idx = data.find('@import')

    # Parse the css. Reverted from a dictionary comprehension in order to
    # support older pythons.
    rules = [chunk.split('{') for chunk in data.split('}') if '{' in chunk.strip()]
    try:
        return dict(
            (selector.strip(), dumb_property_dict(body)) for selector, body in rules
        )
    except ValueError:
        # Malformed rule (e.g. nested '{'); not that important.
        return {}
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def element_style(attrs, style_def, parent_style):
    """Return a dict of the 'final' style attributes of the element.

    Starts from a copy of *parent_style*, layers in the rules of each CSS
    class listed in attrs['class'] (looked up in *style_def*), then applies
    any inline attrs['style'] declarations on top. The parent dict is
    never mutated.
    """
    style = parent_style.copy()
    if 'class' in attrs:
        for css_class in attrs['class'].split():
            # Fix: classes with no stylesheet entry used to raise KeyError;
            # an unknown class now simply contributes nothing.
            css_style = style_def.get('.' + css_class, {})
            style.update(css_style)
    if 'style' in attrs:
        immediate_style = dumb_property_dict(attrs['style'])
        style.update(immediate_style)
    return style
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def google_text_emphasis(style):
    """Return a list of all emphasis modifiers of the element.

    Collects, in order: text-decoration, font-style, font-weight.
    """
    return [
        style[prop]
        for prop in ('text-decoration', 'font-style', 'font-weight')
        if prop in style
    ]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def google_fixed_width_font(style):
    """Check if the css of the current element defines a fixed width font."""
    # Only these two exact family names count as fixed-width here.
    return style.get('font-family', '') in ('Courier New', 'Consolas')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def escape_md_section(text, snob=False):
    """Escapes markdown-sensitive characters across whole document sections."""
    text = md_backslash_matcher.sub(r"\\\1", text)
    if snob:
        text = md_chars_matcher_all.sub(r"\\\1", text)
    # Leading '.', '+' and '-' are escaped so they are not parsed as
    # ordered/unordered list markers.
    for matcher in (md_dot_matcher, md_plus_matcher, md_dash_matcher):
        text = matcher.sub(r"\1\\\2", text)
    return text
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def handle_emphasis(self, start, tag_style, parent_style):
"""handles various text emphases""" |
tag_emphasis = google_text_emphasis(tag_style)
parent_emphasis = google_text_emphasis(parent_style)
# handle Google's text emphasis
# Each flag is set only when the modifier is NEW on this tag (not already
# active on the parent), so marks are not emitted twice.
strikethrough = 'line-through' in tag_emphasis and self.hide_strikethrough
bold = 'bold' in tag_emphasis and not 'bold' in parent_emphasis
italic = 'italic' in tag_emphasis and not 'italic' in parent_emphasis
fixed = google_fixed_width_font(tag_style) and not \
    google_fixed_width_font(parent_style) and not self.pre
if start:
    # crossed-out text must be handled before other attributes
    # in order not to output qualifiers unnecessarily
    if bold or italic or fixed:
        self.emphasis += 1
    if strikethrough:
        # quiet suppresses output entirely while > 0
        self.quiet += 1
    if italic:
        self.o(self.emphasis_mark)
        self.drop_white_space += 1
    if bold:
        self.o(self.strong_mark)
        self.drop_white_space += 1
    if fixed:
        self.o('`')
        self.drop_white_space += 1
        self.code = True
else:
    # Closing: undo in reverse order (fixed, bold, italic).
    if bold or italic or fixed:
        # there must not be whitespace before closing emphasis mark
        self.emphasis -= 1
        self.space = 0
        self.outtext = self.outtext.rstrip()
    if fixed:
        if self.drop_white_space:
            # empty emphasis, drop it
            self.drop_last(1)
            self.drop_white_space -= 1
        else:
            self.o('`')
        self.code = False
    if bold:
        if self.drop_white_space:
            # empty emphasis, drop it (strong mark is 2 chars wide)
            self.drop_last(2)
            self.drop_white_space -= 1
        else:
            self.o(self.strong_mark)
    if italic:
        if self.drop_white_space:
            # empty emphasis, drop it
            self.drop_last(1)
            self.drop_white_space -= 1
        else:
            self.o(self.emphasis_mark)
    # space is only allowed after *all* emphasis marks
    if (bold or italic) and not self.emphasis:
        self.o(" ")
    if strikethrough:
        self.quiet -= 1
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def google_nest_count(self, style):
    """Calculate the nesting count of google doc lists.

    Derived from the element's 'margin-left' (assumed to end in a
    two-character unit, e.g. '72px') divided by the configured
    per-level indent; 0 when no margin is set.
    """
    margin = style.get('margin-left')
    if margin is None:
        return 0
    return int(margin[:-2]) / self.google_list_indent
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def optwrap(self, text):
"""Wrap all paragraphs in the provided text.""" |
# Wrapping disabled entirely when body_width is 0/None.
if not self.body_width:
    return text
assert wrap, "Requires Python 2.3."
result = ''
# newlines tracks how many consecutive '\n' were just emitted, so blank
# lines never stack more than two deep.
newlines = 0
for para in text.split("\n"):
    if len(para) > 0:
        if not skipwrap(para):
            result += "\n".join(wrap(para, self.body_width))
            # NOTE(review): this checks a single trailing space; markdown
            # hard line-breaks are conventionally two spaces — confirm.
            if para.endswith(' '):
                result += " \n"
                newlines = 1
            else:
                result += "\n\n"
                newlines = 2
        else:
            # Paragraph must not be wrapped (e.g. code, tables); keep it
            # verbatim unless it is whitespace-only.
            if not onlywhite(para):
                result += para + "\n"
                newlines = 1
    else:
        # Empty source line: emit at most two consecutive newlines.
        if newlines < 2:
            result += "\n"
            newlines += 1
return result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def similarity(w1, w2, threshold=0.5):
"""compare two strings 'words', and return ratio of smiliarity, be it larger than the threshold, or 0 otherwise. NOTE: if the result more like junk, increase the threshold value. """ |
ratio = SM(None, str(w1).lower(), str(w2).lower()).ratio()
return ratio if ratio > threshold else 0 |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def triangle_area(p0, p1, p2):
    """Area of the triangle(s) (p0, p1, p2).

    p0, p1 are single 2-D points; p2 can be a vector — an (n, 2) array of
    third vertices — in which case an array of n areas is returned.
    """
    if p2.ndim < 2:
        p2 = p2[np.newaxis, :]
    # Half the absolute value of the 2-D cross-product expansion.
    cross = (p0[0] * p1[1] - p0[0] * p2[:, 1] +
             p1[0] * p2[:, 1] - p1[0] * p0[1] +
             p2[:, 0] * p0[1] - p2[:, 0] * p1[1])
    return 0.5 * np.abs(cross)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def inROI(self, Y):
'''Return indices of points in Y that fall inside the ROI, together with
their distance along the ROI path.

The ROI is modelled as a chain of rectangles (self.prect) joined by
circles of radius self.d at the interior joints (self.pos segment
endpoints). A point may be collected more than once; duplicates are
removed at the end (keeping the first distance found).
'''
if Y.ndim > 1:
    area = np.zeros((Y.shape[0],4))
else:
    area = np.zeros((1,4))
pts = np.zeros((0,), int)
pdist = np.zeros((0,), int)
# dist0 accumulates squared segment lengths as the path offset.
# NOTE(review): squared length, not length — confirm intended units.
dist0 = 0
for k in range(len(self.prect)):
    # Area of rectangle k, computed as the sum of its two triangles.
    self.square_area = (triangle_area(self.prect[k][0,:], self.prect[k][1,:], self.prect[k][2,:]) +
                        triangle_area(self.prect[k][2,:], self.prect[k][3,:], self.prect[k][4,:]))
    # Sum of the four triangle areas (corner pair, point) equals the
    # rectangle area exactly when the point lies inside it.
    for n in range(4):
        area[:,n] = triangle_area(self.prect[k][0+n,:], self.prect[k][1+n,:], Y)
    # points inside prect (small epsilon for float tolerance)
    newpts = np.array((area.sum(axis=1) <= self.square_area+1e-5).nonzero()).flatten().astype(int)
    if newpts.size > 0:
        pts = np.concatenate((pts, newpts))
        # Distance = orthogonal projection along segment k + path offset.
        newdists = self.orthproj(Y[newpts, :], k) + dist0
        pdist = np.concatenate((pdist, newdists))
    dist0 += (np.diff(self.pos[k], axis=0)[0,:]**2).sum()
    # check if in radius of circle (joint between consecutive rectangles)
    if k < len(self.prect)-1:
        pcent = self.pos[k][1,:]
        dist = ((Y - pcent[np.newaxis,:])**2).sum(axis=1)**0.5
        newpts = np.array((dist<=self.d).nonzero()[0].astype(int))
        if newpts.size > 0:
            pts = np.concatenate((pts, newpts))
            # Joint points all get the segment-boundary distance.
            newdists = dist0 * np.ones(newpts.shape)
            pdist = np.concatenate((pdist, newdists))
# Deduplicate point indices, keeping each point's first distance.
pts, inds = np.unique(pts, return_index=True)
pdist = pdist[inds]
return pts, pdist
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dwrap(kx, nc):
    """Compute a wrapped (circular) distance on a ring of circumference nc.

    Works element-wise on arrays as well as scalars.
    """
    forward = np.mod(kx, nc)
    # Distance is the shorter of going forward or backward around the ring.
    return np.minimum(forward, nc - forward)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def transform(self, X):
"""If the model is already fit, embed new points X into the existing map.

X must have the same number of features as the original data (or already
be projected into the PCA space self.v). Returns the upsampled embedding
coordinates, transposed to (n_points, dims) when 2-D.
"""
iclustup = []
dims = self.n_components
# isort1 is only set by fit(), so its presence means the model is fitted.
if hasattr(self, 'isort1'):
    if X.shape[1] == self.v.shape[0]:
        # reduce dimensionality of X
        X = X @ self.v
    nclust = self.n_X
    # Correlation of each new point with each cluster's basis.
    AtS = self.A.T @ self.S
    vnorm = np.sum(self.S * (self.A @ AtS), axis=0)[np.newaxis,:]
    cv = X @ AtS
    # Rectified, normalized squared correlation map over clusters.
    cmap = np.maximum(0., cv)**2 / vnorm
    # Refine the argmax position by upsampling the correlation map.
    iclustup, cmax = upsample(np.sqrt(cmap), dims, nclust, 10)
else:
    print('ERROR: new points do not have as many features as original data')
else:
    print('ERROR: need to fit model first before you can embed new points')
# NOTE(review): on the error paths iclustup stays a plain list, and
# list.ndim below would raise AttributeError — confirm intended.
if iclustup.ndim > 1:
    iclustup = iclustup.T
else:
    iclustup = iclustup.flatten()
return iclustup
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def goingLive(self, ctx, client):
    '''
    Overrides nevow method; not really safe to just save ctx,
    client in self for multiple clients, but nice and simple.
    '''
    self.ctx, self.client = ctx, client
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _router_address(self, data):
"""only for IPv6 addresses""" |
args = data.split()[1:]
try:
self._relay_attrs['ip_v6'].extend(args)
except KeyError:
self._relay_attrs['ip_v6'] = list(args) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _notify(self, func, *args, **kw):
""" Internal helper. Calls the IStreamListener function 'func' with the given args, guarding around errors. """ |
for x in self.listeners:
try:
getattr(x, func)(*args, **kw)
except Exception:
log.err() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build_timeout_circuit(tor_state, reactor, path, timeout, using_guards=False):
""" Build a new circuit within a timeout. CircuitBuildTimedOutError will be raised unless we receive a circuit build result (success or failure) within the `timeout` duration. :returns: a Deferred which fires when the circuit build succeeds (or fails to build). """ |
# Holds the Circuit object once the build request yields one, so the
# cancellation path can close it.
timed_circuit = []
d = tor_state.build_circuit(routers=path, using_guards=using_guards)
def get_circuit(c):
    # Remember the circuit, then pass it down the callback chain.
    timed_circuit.append(c)
    return c
def trap_cancel(f):
    # Only handle the CancelledError produced by d.cancel() below;
    # any other failure propagates unchanged.
    f.trap(defer.CancelledError)
    if timed_circuit:
        # Close the half-built circuit before reporting the timeout.
        d2 = timed_circuit[0].close()
    else:
        d2 = defer.succeed(None)
    # Convert the cancellation into a CircuitBuildTimedOutError failure.
    d2.addCallback(lambda _: Failure(CircuitBuildTimedOutError("circuit build timed out")))
    return d2
d.addCallback(get_circuit)
d.addCallback(lambda circ: circ.when_built())
d.addErrback(trap_cancel)
# Cancel the whole chain if it hasn't fired within `timeout` seconds.
reactor.callLater(timeout, d.cancel)
return d
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def age(self, now=None):
    """
    Returns an integer which is the difference in seconds from 'now'
    to when this circuit was created.

    Returns None if there is no created-time.
    """
    if not self.time_created:
        return None
    if now is None:
        now = datetime.utcnow()
    # Fix: timedelta.seconds is only the seconds *component* (0-86399)
    # and silently drops whole days, so circuits older than a day
    # reported a wrong age. total_seconds() includes the days.
    return int((now - self.time_created).total_seconds())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def _parse_version_reply(self):
"waiting for a version reply"
if len(self._data) >= 2:
reply = self._data[:2]
self._data = self._data[2:]
(version, method) = struct.unpack('BB', reply)
if version == 5 and method in [0x00, 0x02]:
self.version_reply(method)
else:
if version != 5:
self.version_error(SocksError(
"Expected version 5, got {}".format(version)))
else:
self.version_error(SocksError(
"Wanted method 0 or 2, got {}".format(method))) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def _parse_request_reply(self):
"waiting for a reply to our request"
# we need at least 6 bytes of data: 4 for the "header", such
# as it is, and 2 more if it's DOMAINNAME (for the size) or 4
# or 16 more if it's an IPv4/6 address reply. plus there's 2
# bytes on the end for the bound port.
# NOTE(review): the comment says 6 bytes but the code waits for 8;
# confirm which minimum is intended.
if len(self._data) < 8:
    return
msg = self._data[:4]
# not changing self._data yet, in case we've not got
# enough bytes so far.
(version, reply, _, typ) = struct.unpack('BBBB', msg)
if version != 5:
    self.reply_error(SocksError(
        "Expected version 5, got {}".format(version)))
    return
if reply != self.SUCCEEDED:
    # Map the SOCKS5 reply code to a specific error instance.
    self.reply_error(_create_socks_error(reply))
    return
# Dispatch on the address type (ATYP) of the bound address.
reply_dispatcher = {
    self.REPLY_IPV4: self._parse_ipv4_reply,
    self.REPLY_HOST: self._parse_domain_name_reply,
    self.REPLY_IPV6: self._parse_ipv6_reply,
}
try:
    method = reply_dispatcher[typ]
except KeyError:
    self.reply_error(SocksError(
        "Unexpected response type {}".format(typ)))
    return
method()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def _make_connection(self, addr, port):
"make our proxy connection"
sender = self._create_connection(addr, port)
# XXX look out! we're depending on this "sender" implementing
# certain Twisted APIs, and the state-machine shouldn't depend
# on that.
# XXX also, if sender implements producer/consumer stuff, we
# should register ourselves (and implement it to) -- but this
# should really be taking place outside the state-machine in
# "the I/O-doing" stuff
self._sender = sender
self._when_done.fire(sender) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def _relay_data(self):
"relay any data we have"
if self._data:
d = self._data
self._data = b''
# XXX this is "doing I/O" in the state-machine and it
# really shouldn't be ... probably want a passed-in
# "relay_data" callback or similar?
self._sender.dataReceived(d) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def _send_connect_request(self):
"sends CONNECT request"
# XXX needs to support v6 ... or something else does
host = self._addr.host
port = self._addr.port
if isinstance(self._addr, (IPv4Address, IPv6Address)):
    is_v6 = isinstance(self._addr, IPv6Address)
    # NOTE(review): the '4s' format packs exactly 4 bytes, but an IPv6
    # address from inet_pton is 16 bytes and would be silently
    # truncated by struct.pack — looks buggy for v6; confirm.
    self._data_to_send(
        struct.pack(
            '!BBBB4sH',
            5,       # version
            0x01,    # command (CONNECT)
            0x00,    # reserved
            0x04 if is_v6 else 0x01,   # ATYP: IPv6 / IPv4
            inet_pton(AF_INET6 if is_v6 else AF_INET, host),
            port,
        )
    )
else:
    # Hostname destination: ATYP 0x03 with a length-prefixed name.
    host = host.encode('ascii')
    self._data_to_send(
        struct.pack(
            '!BBBBB{}sH'.format(len(host)),
            5,       # version
            0x01,    # command (CONNECT)
            0x00,    # reserved
            0x03,    # ATYP: DOMAINNAME
            len(host),
            host,
            port,
        )
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_location(self):
""" Returns a Deferred that fires with a NetLocation object for this router. """ |
# return the cached NetLocation if we already computed one
if self._location:
    return succeed(self._location)
if self.ip != 'unknown':
    self._location = NetLocation(self.ip)
else:
    self._location = NetLocation(None)
if not self._location.countrycode and self.ip != 'unknown':
    # see if Tor is magic and knows more...
    d = self.controller.get_info_raw('ip-to-country/' + self.ip)
    d.addCallback(self._set_country)
    # fire with the NetLocation (countrycode now filled in, if known)
    d.addCallback(lambda _: self._location)
    return d
return succeed(self._location)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def policy(self, args):
""" setter for the policy descriptor """ |
# the first word selects which list further ports are collected into;
# a 'reject' policy implicitly accepts everything else, and vice-versa
word = args[0]
if word == 'reject':
    self.accepted_ports = None
    self.rejected_ports = []
    target = self.rejected_ports
elif word == 'accept':
    self.accepted_ports = []
    self.rejected_ports = None
    target = self.accepted_ports
else:
    raise RuntimeError("Don't understand policy word \"%s\"" % word)
# the second arg is a comma-separated list of ports or port-ranges
for port in args[1].split(','):
    if '-' in port:
        # a span like "80-90" becomes a PortRange instance
        (a, b) = port.split('-')
        target.append(PortRange(int(a), int(b)))
    else:
        target.append(int(port))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def accepts_port(self, port):
""" Query whether this Router will accept the given port. """ |
if self.rejected_ports is None and self.accepted_ports is None:
raise RuntimeError("policy hasn't been set yet")
if self.rejected_ports:
for x in self.rejected_ports:
if port == x:
return False
return True
for x in self.accepted_ports:
if port == x:
return True
return False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _set_country(self, c):
""" callback if we used Tor's GETINFO ip-to-country """ |
self.location.countrycode = c.split()[0].split('=')[1].strip().upper() |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _load_private_key_file(fname):
""" Loads an onion-service private-key from the given file. This can be either a 'key blob' as returned from a previous ADD_ONION call, or a v3 or v2 file as created by Tor when using the HiddenServiceDir directive. In any case, a key-blob suitable for ADD_ONION use is returned. """ |
with open(fname, "rb") as f:
    data = f.read()
# v3 key files are raw binary; the key material is everything after
# the first three-NUL marker, base64-encoded for ADD_ONION
if b"\x00\x00\x00" in data:  # v3 private key file
    blob = data[data.find(b"\x00\x00\x00") + 3:]
    return u"ED25519-V3:{}".format(b2a_base64(blob.strip()).decode('ascii').strip())
# v2 keys are PEM; strip the armour lines, keep the base64 body
if b"-----BEGIN RSA PRIVATE KEY-----" in data:  # v2 RSA key
    blob = "".join(data.decode('ascii').split('\n')[1:-2])
    return u"RSA1024:{}".format(blob)
# maybe it's already a "<kind>:<key>" blob as returned by ADD_ONION
blob = data.decode('ascii').strip()
if ':' in blob:
    kind, key = blob.split(':', 1)
    if kind in ['ED25519-V3', 'RSA1024']:
        return blob
raise ValueError(
    "'{}' does not appear to contain v2 or v3 private key data".format(
        fname,
    )
)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def config_attributes(self):
""" Helper method used by TorConfig when generating a torrc file. """ |
rtn = [('HiddenServiceDir', str(self.dir))]
if self.conf._supports['HiddenServiceDirGroupReadable'] \
and self.group_readable:
rtn.append(('HiddenServiceDirGroupReadable', str(1)))
for port in self.ports:
rtn.append(('HiddenServicePort', str(port)))
if self.version:
rtn.append(('HiddenServiceVersion', str(self.version)))
for authline in self.authorize_client:
rtn.append(('HiddenServiceAuthorizeClient', str(authline)))
return rtn |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def add_to_tor(self, protocol):
    '''
    Returns a Deferred which fires with 'self' after at least one
    descriptor has been uploaded. Errback if no descriptor upload
    succeeds.
    '''
    # set up the descriptor-upload waiter *before* issuing ADD_ONION
    upload_d = _await_descriptor_upload(protocol, self, progress=None, await_all_uploads=False)
    # _add_ephemeral_service takes a TorConfig but we don't have
    # that here .. and also we're just keeping this for
    # backwards-compatability anyway so instead of trying to
    # re-use that helper I'm leaving this original code here. So
    # this is what it supports and that's that:
    ports = ' '.join(map(lambda x: 'Port=' + x.strip(), self._ports))
    cmd = 'ADD_ONION %s %s' % (self._key_blob, ports)
    ans = yield protocol.queue_command(cmd)
    ans = find_keywords(ans.split('\n'))
    self.hostname = ans['ServiceID'] + '.onion'
    if self._key_blob.startswith('NEW:'):
        # Tor generated a fresh key for us; remember it
        self.private_key = ans['PrivateKey']
    else:
        self.private_key = self._key_blob
    log.msg('Created hidden-service at', self.hostname)
    log.msg("Created '{}', waiting for descriptor uploads.".format(self.hostname))
    # don't fire until at least one descriptor upload has completed
    yield upload_d
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def remove_from_tor(self, protocol):
    '''
    Returns a Deferred which fires with None
    '''
    # hostname ends with ".onion" (6 chars); DEL_ONION wants the bare
    # service-id
    r = yield protocol.queue_command('DEL_ONION %s' % self.hostname[:-6])
    if r.strip() != 'OK':
        raise RuntimeError('Failed to remove hidden service: "%s".' % r)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def from_protocol(proto):
""" This creates and returns a ready-to-go TorConfig instance from the given protocol, which should be an instance of TorControlProtocol. """ |
cfg = TorConfig(control=proto)
# wait until the new TorConfig has pulled its configuration from Tor
yield cfg.post_bootstrap
defer.returnValue(cfg)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def socks_endpoint(self, reactor, port=None):
""" Returns a TorSocksEndpoint configured to use an already-configured SOCKSPort from the Tor we're connected to. By default, this will be the very first SOCKSPort. :param port: a str, the first part of the SOCKSPort line (that is, a port like "9151" or a Unix socket config like "unix:/path". You may also specify a port as an int. If you need to use a particular port that may or may not already be configured, see the async method :meth:`txtorcon.TorConfig.create_socks_endpoint` """ |
if len(self.SocksPort) == 0:
    raise RuntimeError(
        "No SOCKS ports configured"
    )
socks_config = None
if port is None:
    # default: use the very first configured SOCKSPort
    socks_config = self.SocksPort[0]
else:
    port = str(port)  # in case e.g. an int passed in
    if ' ' in port:
        raise ValueError(
            "Can't specify options; use create_socks_endpoint instead"
        )
    for idx, port_config in enumerate(self.SocksPort):
        # "SOCKSPort" is a gnarly beast that can have a bunch
        # of options appended, so we have to split off the
        # first thing which *should* be the port (or can be a
        # string like 'unix:')
        if port_config.split()[0] == port:
            socks_config = port_config
            break
if socks_config is None:
    raise RuntimeError(
        "No SOCKSPort configured for port {}".format(port)
    )
return _endpoint_from_socksport_line(reactor, socks_config)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def attach_protocol(self, proto):
""" returns a Deferred that fires once we've set this object up to track the protocol. Fails if we already have a protocol. """ |
if self._protocol is not None:
    raise RuntimeError("Already have a protocol.")
# make sure we have nothing in self.unsaved
self.save()
# poke __dict__ directly to avoid triggering this object's
# attribute-setting machinery
self.__dict__['_protocol'] = proto
# FIXME some of this is duplicated from ctor
del self.__dict__['_accept_all_']
self.__dict__['post_bootstrap'] = defer.Deferred()
if proto.post_bootstrap:
    proto.post_bootstrap.addCallback(self.bootstrap)
return self.__dict__['post_bootstrap']
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_type(self, name):
""" return the type of a config key. :param: name the key FIXME can we do something more-clever than this for client code to determine what sort of thing a key is? """ |
# XXX FIXME uhm...how to do all the different types of hidden-services?
# XXX FIXME uhm...how to do all the different types of hidden-services?
if name.lower() == 'hiddenservices':
    # hidden-service config is modelled via onion-service objects
    return FilesystemOnionService
# everything else derives its type from the registered parser instance
return type(self.parsers[name])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def bootstrap(self, arg=None):
    '''
    This only takes args so it can be used as a callback. Don't
    pass an arg, it is ignored.
    '''
    try:
        # listen for config changes so our cached values stay current
        d = self.protocol.add_event_listener(
            'CONF_CHANGED', self._conf_changed)
    except RuntimeError:
        # for Tor versions which don't understand CONF_CHANGED
        # there's nothing we can really do.
        log.msg(
            "Can't listen for CONF_CHANGED event; won't stay up-to-date "
            "with other clients.")
        d = defer.succeed(None)
    # load all config keys, parse them, then fire post_bootstrap
    d.addCallback(lambda _: self.protocol.get_info_raw("config/names"))
    d.addCallback(self._do_setup)
    d.addCallback(self.do_post_bootstrap)
    d.addErrback(self.do_post_errback)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def save(self):
""" Save any outstanding items. This returns a Deferred which will errback if Tor was unhappy with anything, or callback with this TorConfig object on success. """ |
if not self.needs_save():
    return defer.succeed(self)
# args accumulates alternating key/value strings for SETCONF
args = []
directories = []
for (key, value) in self.unsaved.items():
    if key == 'HiddenServices':
        self.config['HiddenServices'] = value
        # using a list here because at least one unit-test
        # cares about order -- and conceivably order *could*
        # matter here, to Tor...
        services = list()
        # authenticated services get flattened into the HiddenServices list...
        for hs in value:
            if IOnionClient.providedBy(hs):
                parent = IOnionClient(hs).parent
                if parent not in services:
                    services.append(parent)
            elif isinstance(hs, (EphemeralOnionService, EphemeralHiddenService)):
                raise ValueError(
                    "Only filesystem based Onion services may be added"
                    " via TorConfig.hiddenservices; ephemeral services"
                    " must be created with 'create_onion_service'."
                )
            else:
                if hs not in services:
                    services.append(hs)
        # each service contributes its own torrc key/value pairs;
        # two services may not share a HiddenServiceDir
        for hs in services:
            for (k, v) in hs.config_attributes():
                if k == 'HiddenServiceDir':
                    if v not in directories:
                        directories.append(v)
                        args.append(k)
                        args.append(v)
                    else:
                        raise RuntimeError("Trying to add hidden service with same HiddenServiceDir: %s" % v)
                else:
                    args.append(k)
                    args.append(v)
        continue
    if isinstance(value, list):
        for x in value:
            # FIXME XXX
            if x is not DEFAULT_VALUE:
                args.append(key)
                args.append(str(x))
    else:
        args.append(key)
        args.append(value)
    # FIXME in future we should wait for CONF_CHANGED and
    # update then, right?
    # update our local cache with the (parsed) new value
    real_name = self._find_real_name(key)
    if not isinstance(value, list) and real_name in self.parsers:
        value = self.parsers[real_name].parse(value)
    self.config[real_name] = value
# FIXME might want to re-think this, but currently there's no
# way to put things into a config and get them out again
# nicely...unless you just don't assign a protocol
if self.protocol:
    d = self.protocol.set_conf(*args)
    d.addCallback(self._save_completed)
    return d
else:
    self._save_completed()
    return defer.succeed(self)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _timeout_expired(self):
""" A timeout was supplied during setup, and the time has run out. """ |
self._did_timeout = True
try:
    # ask the tor child process to terminate
    self.transport.signalProcess('TERM')
except error.ProcessExitedAlready:
    # XXX why don't we just always do this?
    self.transport.loseConnection()
# tell any waiters that the launch failed
fail = Failure(RuntimeError("timeout while launching Tor"))
self._maybe_notify_connected(fail)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cleanup(self):
""" Clean up my temporary files. """ |
# delete each temporary file/dir; a plain loop (instead of the old
# "all([...])" trick) since we only care about the side effect, not
# the return values
for path in self.to_delete:
    delete_file_or_tree(path)
self.to_delete = []
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def progress(self, percent, tag, summary):
""" Can be overridden or monkey-patched if you want to get progress updates yourself. """ |
# hand the update to the user-supplied callback, if one was configured
callback = self.progress_updates
if callback:
    callback(percent, tag, summary)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def main(reactor):
""" Close all open streams and circuits in the Tor we connect to """ |
# connect to a running (system) Tor over its control socket
control_ep = UNIXClientEndpoint(reactor, '/var/run/tor/control')
tor = yield txtorcon.connect(reactor, control_ep)
state = yield tor.create_state()
print("Closing all circuits:")
# list() the values since closing mutates the circuits collection
for circuit in list(state.circuits.values()):
    path = '->'.join(map(lambda r: r.id_hex, circuit.path))
    print("Circuit {} through {}".format(circuit.id, path))
    # close every stream on the circuit before the circuit itself
    for stream in circuit.streams:
        print("  Stream {} to {}".format(stream.id, stream.target_host))
        yield stream.close()
        print("  closed")
    yield circuit.close()
    print("closed")
yield tor.quit()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update(self, *args):
""" deals with an update from Tor; see parsing logic in torcontroller """ |
gmtexpires = None
(name, ip, expires) = args[:3]
# an explicit "EXPIRES=" keyword argument wins, if present
for arg in args:
    if arg.lower().startswith('expires='):
        gmtexpires = arg[8:]
if gmtexpires is None:
    if len(args) == 3:
        # no keyword form; use the positional expiry
        gmtexpires = expires
    else:
        # "NEVER" may appear as the third arg; otherwise the
        # fourth arg carries the GMT expiry
        if args[2] == 'NEVER':
            gmtexpires = args[2]
        else:
            gmtexpires = args[3]
self.name = name  # "www.example.com"
self.ip = maybe_ip_addr(ip)  # IPV4Address instance, or string
if self.ip == '<error>':
    self._expire()
    return
fmt = "%Y-%m-%d %H:%M:%S"
# if we already have expiry times, etc then we want to
# properly delay our timeout
oldexpires = self.expires
if gmtexpires.upper() == 'NEVER':
    # FIXME can I just select a date 100 years in the future instead?
    self.expires = None
else:
    self.expires = datetime.datetime.strptime(gmtexpires, fmt)
self.created = datetime.datetime.utcnow()
if self.expires is not None:
    if oldexpires is None:
        # first expiry we've seen: schedule the expiration callback
        if self.expires <= self.created:
            diff = datetime.timedelta(seconds=0)
        else:
            diff = self.expires - self.created
        self.expiry = self.map.scheduler.callLater(diff.seconds,
                                                   self._expire)
    else:
        # already scheduled: push the existing timer later by the delta
        diff = self.expires - oldexpires
        self.expiry.delay(diff.seconds)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _expire(self):
""" callback done via callLater """ |
# drop ourselves from the containing map, then tell listeners why
del self.map.addr[self.name]
self.map.notify("addrmap_expired", self.name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create_tbb_web_headers():
""" Returns a new `twisted.web.http_headers.Headers` instance populated with tags to mimic Tor Browser. These include values for `User-Agent`, `Accept`, `Accept-Language` and `Accept-Encoding`. """ |
# header values chosen to match what Tor Browser sends by default
return Headers({
    b"User-Agent": [b"Mozilla/5.0 (Windows NT 6.1; rv:45.0) Gecko/20100101 Firefox/45.0"],
    b"Accept": [b"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"],
    b"Accept-Language": [b"en-US,en;q=0.5"],
    b"Accept-Encoding": [b"gzip, deflate"],
})
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def version_at_least(version_string, major, minor, micro, patch):
""" This returns True if the version_string represents a Tor version of at least ``major``.``minor``.``micro``.``patch`` version, ignoring any trailing specifiers. """ |
parts = re.match(
r'^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+).*$',
version_string,
)
for ver, gold in zip(parts.group(1, 2, 3, 4), (major, minor, micro, patch)):
if int(ver) < int(gold):
return False
elif int(ver) > int(gold):
return True
return True |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_tor_binary(globs=('/usr/sbin/', '/usr/bin/', '/Applications/TorBrowser_*.app/Contents/MacOS/'), system_tor=True):
""" Tries to find the tor executable using the shell first, or in the paths whose glob-patterns are in the given 'globs'-tuple. :param globs: A tuple of shell-style globs of directories to use to find tor (TODO consider making the globs point to the actual tor binary?) :param system_tor: This controls whether bash is used to search for 'tor' or not. If False, we skip that check and use only the 'globs' tuple. """ |
# Try to find the tor executable using the shell
if system_tor:
try:
proc = subprocess.Popen(
('which tor'),
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
shell=True
)
except OSError:
pass
else:
stdout, _ = proc.communicate()
if proc.poll() == 0 and stdout != '':
return stdout.strip()
# the shell may not provide type and tor is usually not on PATH when using
# the browser-bundle. Look in specific places
for pattern in globs:
for path in glob.glob(pattern):
torbin = os.path.join(path, 'tor')
if is_executable(torbin):
return torbin
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def maybe_ip_addr(addr):
""" Tries to return an IPAddress, otherwise returns a string. TODO consider explicitly checking for .exit or .onion at the end? """ |
if six.PY2 and isinstance(addr, str):
    # ipaddress wants text (unicode) input on Python 2
    addr = unicode(addr) # noqa
try:
    return ipaddress.ip_address(addr)
except ValueError:
    # not a valid IPv4/IPv6 literal; fall through to a plain string
    pass
return str(addr)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delete_file_or_tree(*args):
""" For every path in args, try to delete it as a file or a directory tree. Ignores deletion errors. """ |
# try plain-file deletion first; if that fails (e.g. it's actually a
# directory) fall back to removing it as a tree, swallowing errors
for target in args:
    try:
        os.unlink(target)
    except OSError:
        shutil.rmtree(target, ignore_errors=True)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def available_tcp_port(reactor):
""" Returns a Deferred firing an available TCP port on localhost. It does so by listening on port 0; then stopListening and fires the assigned port number. """ |
# bind to port 0 so the OS assigns us any free port
endpoint = serverFromString(reactor, 'tcp:0:interface=127.0.0.1')
port = yield endpoint.listen(NoOpProtocolFactory())
address = port.getHost()
# free the port again before reporting its number
yield port.stopListening()
defer.returnValue(address.port)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.