Dataset columns:
_id               stringlengths 2-7
title             stringlengths 1-88
partition         stringclasses (3 values)
text              stringlengths 31-13.1k
language          stringclasses (1 value)
meta_information  dict
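As a rough illustration of how rows with this schema can be consumed, the sketch below builds two stand-in records and groups them by partition. It is not part of the dump: the `_id`, `title`, and `partition` values are copied from the first two records that follow, but the `text` fields are abbreviated placeholders rather than the actual snippets.

```python
# Minimal sketch: iterating records that follow the column schema above.
# The two inline rows are illustrative stand-ins, not actual dataset entries.
from collections import Counter

records = [
    {
        "_id": "q257300",
        "title": "cancel",
        "partition": "validation",
        "text": "def cancel(batch_fn, cancel_fn, ops): ...",  # abbreviated placeholder
        "language": "python",
        "meta_information": {"resource": ""},
    },
    {
        "_id": "q257301",
        "title": "retry_api_check",
        "partition": "validation",
        "text": "def retry_api_check(exception): ...",  # abbreviated placeholder
        "language": "python",
        "meta_information": {"resource": ""},
    },
]

# Count rows per partition and collect the code snippets of one partition.
per_partition = Counter(r["partition"] for r in records)
validation_snippets = [r["text"] for r in records if r["partition"] == "validation"]

print(per_partition)             # e.g. Counter({'validation': 2})
print(len(validation_snippets))  # 2
```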
q257300
cancel
validation
def cancel(batch_fn, cancel_fn, ops): """Cancel operations. Args: batch_fn: API-specific batch function. cancel_fn: API-specific cancel function. ops: A list of operations to cancel. Returns: A list of operations canceled and a list of error messages. """ # Canceling many operations one-by-one can be slow. # The Pipelines API doesn't directly support a list of operations to cancel, # but the requests can be performed in batch. canceled_ops = [] error_messages =
python
{ "resource": "" }
q257301
retry_api_check
validation
def retry_api_check(exception): """Return True if we should retry. False otherwise. Args: exception: An exception to test for transience. Returns: True if we should retry. False otherwise. """ if isinstance(exception, apiclient.errors.HttpError): if exception.resp.status in TRANSIENT_HTTP_ERROR_CODES: _print_error('Retrying...') return True if isinstance(exception, socket.error): if exception.errno in TRANSIENT_SOCKET_ERROR_CODES: _print_error('Retrying...') return True if isinstance(exception, oauth2client.client.AccessTokenRefreshError): _print_error('Retrying...') return True
python
{ "resource": "" }
q257302
retry_auth_check
validation
def retry_auth_check(exception): """Specific check for auth error codes. Return True if we should retry. False otherwise. Args: exception: An exception to test for transience. Returns: True if we should retry. False otherwise.
python
{ "resource": "" }
q257303
setup_service
validation
def setup_service(api_name, api_version, credentials=None): """Configures genomics API client. Args: api_name: Name of the Google API (for example: "genomics") api_version: Version of the API (for example: "v2alpha1") credentials: Credentials
python
{ "resource": "" }
q257304
Api.execute
validation
def execute(api): """Executes operation. Args: api: The base API object Returns: A response body object """ try: return api.execute() except Exception as exception: now = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
python
{ "resource": "" }
q257305
_eval_arg_type
validation
def _eval_arg_type(arg_type, T=Any, arg=None, sig=None): """Returns a type from a snippet of python source. Should normally be something just like 'str' or 'Object'. arg_type the source to be evaluated T the default type arg context of where this type was extracted sig context from where the arg was extracted Returns a type or a Type """ try: T = eval(arg_type)
python
{ "resource": "" }
q257306
jsonify_status_code
validation
def jsonify_status_code(status_code, *args, **kw): """Returns a jsonified response with the specified HTTP status code. The positional and keyword arguments are passed directly to the :func:`flask.jsonify` function which creates the response. """ is_batch = kw.pop('is_batch', False) if is_batch: response = flask_make_response(json.dumps(*args, **kw))
python
{ "resource": "" }
q257307
ServiceProxy.send_payload
validation
def send_payload(self, params): """Performs the actual sending action and returns the result """ data = json.dumps({ 'jsonrpc': self.version, 'method': self.service_name, 'params': params, 'id': text_type(uuid.uuid4())
python
{ "resource": "" }
q257308
Error.json_rpc_format
validation
def json_rpc_format(self): """Return the Exception data in a format for JSON-RPC """ error = { 'name': text_type(self.__class__.__name__), 'code': self.code, 'message': '{0}'.format(text_type(self.message)), 'data': self.data
python
{ "resource": "" }
q257309
Config.from_file
validation
def from_file(cls, file): """Try loading given config file. :param str file: full path to the config file to load """ if not os.path.exists(file): raise ValueError("Config file not found.") try: config_parser = configparser.ConfigParser() config_parser.read(file) configuration = cls(file, config_parser)
python
{ "resource": "" }
q257310
Config.discover
validation
def discover(cls): """Make a guess about the config file location and try loading it."""
python
{ "resource": "" }
q257311
Config.create_config
validation
def create_config(cls, cfgfile, nick, twtfile, twturl, disclose_identity, add_news): """Create a new config file at the default location. :param str cfgfile: path to the config file :param str nick: nickname to use for own tweets :param str twtfile: path to the local twtxt file :param str twturl: URL to the remote twtxt file :param bool disclose_identity: if true the users id will be disclosed :param bool add_news: if true follow twtxt news feed """ cfgfile_dir = os.path.dirname(cfgfile) if not os.path.exists(cfgfile_dir): os.makedirs(cfgfile_dir) cfg = configparser.ConfigParser() cfg.add_section("twtxt")
python
{ "resource": "" }
q257312
Config.write_config
validation
def write_config(self): """Writes `self.cfg` to `self.config_file`.""" with
python
{ "resource": "" }
q257313
validate_config_key
validation
def validate_config_key(ctx, param, value): """Validate a configuration key according to `section.item`.""" if not value: return value
python
{ "resource": "" }
q257314
expand_mentions
validation
def expand_mentions(text, embed_names=True): """Searches the given text for mentions and expands them. For example: "@source.nick" will be expanded to "@<source.nick source.url>". """ if embed_names: mention_format = "@<{name} {url}>" else: mention_format = "@<{url}>" def handle_mention(match): source = get_source_by_name(match.group(1))
python
{ "resource": "" }
q257315
make_aware
validation
def make_aware(dt): """Appends tzinfo and assumes UTC, if datetime object has no tzinfo already."""
python
{ "resource": "" }
q257316
Cache.from_file
validation
def from_file(cls, file, *args, **kwargs): """Try loading given cache file.""" try: cache = shelve.open(file) return cls(file, cache, *args, **kwargs)
python
{ "resource": "" }
q257317
Cache.discover
validation
def discover(cls, *args, **kwargs): """Make a guess about the cache file location and try loading it."""
python
{ "resource": "" }
q257318
Cache.is_cached
validation
def is_cached(self, url): """Checks if specified URL is cached.""" try:
python
{ "resource": "" }
q257319
Cache.add_tweets
validation
def add_tweets(self, url, last_modified, tweets): """Adds new tweets to the cache.""" try: self.cache[url] = {"last_modified": last_modified, "tweets": tweets}
python
{ "resource": "" }
q257320
Cache.get_tweets
validation
def get_tweets(self, url, limit=None): """Retrieves tweets from the cache.""" try: tweets = self.cache[url]["tweets"] self.mark_updated()
python
{ "resource": "" }
q257321
Cache.remove_tweets
validation
def remove_tweets(self, url): """Tries to remove cached tweets.""" try: del self.cache[url]
python
{ "resource": "" }
q257322
timeline
validation
def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, cache, force_update): """Retrieve your personal timeline.""" if source: source_obj = ctx.obj["conf"].get_source_by_nick(source) if not source_obj: logger.debug("Not following {0}, trying as URL".format(source)) source_obj = Source(source, source) sources = [source_obj] else: sources = ctx.obj["conf"].following tweets = [] if cache: try: with Cache.discover(update_interval=ctx.obj["conf"].timeline_update_interval) as cache: force_update = force_update or not cache.is_valid if force_update: tweets = get_remote_tweets(sources, limit, timeout, cache) else: logger.debug("Multiple calls to 'timeline' within {0} seconds. Skipping update".format( cache.update_interval)) # Behold, almighty list comprehensions! (I might have gone overboard here…) tweets = list(chain.from_iterable([cache.get_tweets(source.url)
python
{ "resource": "" }
q257323
config
validation
def config(ctx, key, value, remove, edit): """Get or set config item.""" conf = ctx.obj["conf"] if not edit and not key: raise click.BadArgumentUsage("You have to specify either a key or use --edit.") if edit: return click.edit(filename=conf.config_file) if remove: try: conf.cfg.remove_option(key[0], key[1]) except Exception as e: logger.debug(e) else: conf.write_config() return
python
{ "resource": "" }
q257324
Tweet.relative_datetime
validation
def relative_datetime(self): """Return human-readable relative time string.""" now = datetime.now(timezone.utc) tense = "from now" if self.created_at > now else "ago"
python
{ "resource": "" }
q257325
save
validation
def save(url, *args, **kwargs): """ Parse the options, set defaults and then fire up PhantomJS. """ device = heimdallDevice(kwargs.get('device', None)) kwargs['width'] = kwargs.get('width', None) or device.width kwargs['height'] = kwargs.get('height', None) or device.height kwargs['user_agent'] = kwargs.get('user_agent', None) or device.user_agent
python
{ "resource": "" }
q257326
screenshot
validation
def screenshot(url, *args, **kwargs): """ Call PhantomJS with the specified flags and options. """ phantomscript = os.path.join(os.path.dirname(__file__), 'take_screenshot.js') directory = kwargs.get('save_dir', '/tmp') image_name = kwargs.get('image_name', None) or _image_name_from_url(url) ext = kwargs.get('format', 'png').lower() save_path = os.path.join(directory, image_name) + '.' + ext crop_to_visible = kwargs.get('crop_to_visible', False) cmd_args = [ 'phantomjs', '--ssl-protocol=any', phantomscript, url, '--width', str(kwargs['width']), '--height', str(kwargs['height']), '--useragent', str(kwargs['user_agent']), '--dir', directory, '--ext', ext, '--name',
python
{ "resource": "" }
q257327
_image_name_from_url
validation
def _image_name_from_url(url): """ Create a nice image name from the url.
python
{ "resource": "" }
q257328
worker
validation
def worker(f): """ Decorator. Abortable worker. If wrapped task will be cancelled by dispatcher, decorator will send ftp codes of successful interrupt. :: >>> @worker ... async def worker(self, connection, rest):
python
{ "resource": "" }
q257329
User.get_permissions
validation
def get_permissions(self, path): """ Return nearest parent permission for `path`. :param path: path which permission you want to know :type path: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :rtype: :py:class:`aioftp.Permission` """ path = pathlib.PurePosixPath(path)
python
{ "resource": "" }
q257330
AvailableConnections.release
validation
def release(self): """ Release, incrementing the internal counter by one. """ if self.value is not None: self.value += 1 if
python
{ "resource": "" }
q257331
register_memory
validation
def register_memory(): """Register an approximation of memory used by FTP server process and all of its children. """ # XXX How to get a reliable representation of memory being used is # not clear. (rss - shared) seems kind of ok but we might also use # the private working set via get_memory_maps().private*. def get_mem(proc): if os.name == 'posix': mem = proc.memory_info_ex() counter = mem.rss if 'shared' in mem._fields:
python
{ "resource": "" }
q257332
connect
validation
def connect(): """Connect to FTP server, login and return an ftplib.FTP instance.""" ftp_class = ftplib.FTP if not SSL else ftplib.FTP_TLS ftp = ftp_class(timeout=TIMEOUT) ftp.connect(HOST, PORT)
python
{ "resource": "" }
q257333
bytes_per_second
validation
def bytes_per_second(ftp, retr=True): """Return the number of bytes transmitted in 1 second.""" tot_bytes = 0 if retr: def request_file(): ftp.voidcmd('TYPE I') conn = ftp.transfercmd("retr " + TESTFN) return conn with contextlib.closing(request_file()) as conn: register_memory() stop_at = time.time() + 1.0 while stop_at > time.time(): chunk = conn.recv(BUFFER_LEN) if not chunk: a = time.time() ftp.voidresp() conn.close() conn = request_file() stop_at += time.time() - a tot_bytes += len(chunk) try: while chunk:
python
{ "resource": "" }
q257334
async_enterable
validation
def async_enterable(f): """ Decorator. Bring coroutine result up, so it can be used as async context :: >>> async def foo(): ... ... ... ... return AsyncContextInstance(...) ... ... ctx = await foo() ... async with ctx: ... ... # do :: >>> @async_enterable ... async def foo(): ... ... ... ... return AsyncContextInstance(...) ... ... async with foo() as ctx: ... ... # do ... ... ctx = await foo() ... async with ctx: ... ... # do """ @functools.wraps(f) def wrapper(*args, **kwargs): class AsyncEnterableInstance:
python
{ "resource": "" }
q257335
setlocale
validation
def setlocale(name): """ Context manager with threading lock for set locale on enter, and set it back to original state on exit. :: >>> with setlocale("C"): ... ... """ with LOCALE_LOCK: old_locale = locale.setlocale(locale.LC_ALL)
python
{ "resource": "" }
q257336
Throttle.append
validation
def append(self, data, start): """ Count `data` for throttle :param data: bytes of data for count :type data: :py:class:`bytes` :param start: start of read/write time from :py:meth:`asyncio.BaseEventLoop.time` :type start: :py:class:`float` """ if self._limit is not None and self._limit > 0: if self._start is None: self._start = start
python
{ "resource": "" }
q257337
Throttle.limit
validation
def limit(self, value): """ Set throttle limit :param value: bytes per second :type value: :py:class:`int` or :py:class:`None`
python
{ "resource": "" }
q257338
StreamThrottle.clone
validation
def clone(self): """ Clone throttles without memory """ return StreamThrottle(
python
{ "resource": "" }
q257339
ThrottleStreamIO.append
validation
def append(self, name, data, start): """ Update timeout for all throttles :param name: name of throttle to append to ("read" or "write") :type name: :py:class:`str` :param data: bytes of data for count :type data: :py:class:`bytes` :param start: start of read/write time from
python
{ "resource": "" }
q257340
BaseClient.check_codes
validation
def check_codes(self, expected_codes, received_code, info): """ Checks if any of expected matches received. :param expected_codes: tuple of expected codes :type expected_codes: :py:class:`tuple` :param received_code: received code for matching :type received_code: :py:class:`aioftp.Code` :param info: list of response lines from server
python
{ "resource": "" }
q257341
BaseClient.parse_directory_response
validation
def parse_directory_response(s): """ Parsing directory server response. :param s: response line :type s: :py:class:`str` :rtype: :py:class:`pathlib.PurePosixPath` """ seq_quotes = 0 start = False directory = "" for ch in s: if not start: if ch == "\"": start = True else: if ch == "\"": seq_quotes += 1 else: if
python
{ "resource": "" }
q257342
BaseClient.parse_list_line_windows
validation
def parse_list_line_windows(self, b): """ Parsing Microsoft Windows `dir` output :param b: response line :type b: :py:class:`bytes` or :py:class:`str` :return: (path, info) :rtype: (:py:class:`pathlib.PurePosixPath`, :py:class:`dict`) """ line = b.decode(encoding=self.encoding).rstrip("\r\n") date_time_end = line.index("M") date_time_str = line[:date_time_end + 1].strip().split(" ") date_time_str = " ".join([x for x in date_time_str if len(x) > 0]) line = line[date_time_end + 1:].lstrip() with setlocale("C"): strptime = datetime.datetime.strptime date_time = strptime(date_time_str, "%m/%d/%Y %I:%M %p") info = {} info["modify"] = self.format_date_time(date_time) next_space = line.index(" ") if line.startswith("<DIR>"): info["type"] = "dir" else: info["type"] = "file" info["size"] = line[:next_space].replace(",", "")
python
{ "resource": "" }
q257343
Client.upload_stream
validation
def upload_stream(self, destination, *, offset=0): """ Create stream for write data to `destination` file. :param destination: destination path of file on server side :type destination: :py:class:`str` or :py:class:`pathlib.PurePosixPath` :param offset: byte offset for stream start position :type offset: :py:class:`int`
python
{ "resource": "" }
q257344
jenks_breaks
validation
def jenks_breaks(values, nb_class): """ Compute jenks natural breaks on a sequence of `values`, given `nb_class`, the number of desired class. Parameters ---------- values : array-like The Iterable sequence of numbers (integer/float) to be used. nb_class : int The desired number of class (as some other functions requests a `k` value, `nb_class` is like `k` + 1). Have to be lesser than the length of `values` and greater than 2. Returns ------- breaks : tuple of floats The computed break values, including minimum and maximum, in order to have all the bounds for building `nb_class` class, so the returned tuple has a length of `nb_class` + 1. Examples -------- Using nb_class = 3, expecting 4 break values , including min and max : >>> jenks_breaks( [1.3, 7.1, 7.3, 2.3, 3.9, 4.1, 7.8, 1.2, 4.3, 7.3, 5.0, 4.3], nb_class = 3) # Should output (1.2, 2.3, 5.0, 7.8) """ if not isinstance(values, Iterable) or isinstance(values, (str, bytes)): raise TypeError("A sequence of numbers is expected") if isinstance(nb_class, float) and int(nb_class) == nb_class: nb_class = int(nb_class) if not isinstance(nb_class, int): raise TypeError( "Number of class have to be
python
{ "resource": "" }
q257345
Gdk3PixbufWrapper.grab
validation
def grab(self, bbox=None): """Grabs an image directly to a buffer. :param bbox: Optional tuple or list containing (x1, y1, x2, y2) coordinates of sub-region to capture. :return: PIL RGB image :raises: ValueError, if image data does not have 3 channels (RGB), each with 8 bits. :rtype: Image """ w = Gdk.get_default_root_window() if bbox is not None: g = [bbox[0], bbox[1], bbox[2] - bbox[0], bbox[3] - bbox[1]] else: g = w.get_geometry() pb = Gdk.pixbuf_get_from_window(w, *g) if pb.get_bits_per_sample() != 8: raise ValueError('Expected 8 bits per pixel.') elif pb.get_n_channels() != 3: raise ValueError('Expected
python
{ "resource": "" }
q257346
grab
validation
def grab(bbox=None, childprocess=None, backend=None): """Copy the contents of the screen to PIL image memory. :param bbox: optional bounding box (x1,y1,x2,y2) :param childprocess: pyscreenshot can cause an error, if it is used on more different virtual displays and back-end is not in different process.
python
{ "resource": "" }
q257347
backend_version
validation
def backend_version(backend, childprocess=None): """Back-end version. :param backend: back-end (examples:scrot, wx,..) :param childprocess: see :py:func:`grab` :return: version as string """ if childprocess is None: childprocess = childprocess_default_value() if not
python
{ "resource": "" }
q257348
open
validation
def open( config, mode="continue", zoom=None, bounds=None, single_input_file=None, with_cache=False, debug=False ): """ Open a Mapchete process. Parameters ---------- config : MapcheteConfig object, config dict or path to mapchete file Mapchete process configuration mode : string * ``memory``: Generate process output on demand without reading pre-existing data or writing new data. * ``readonly``: Just read data without processing new data. * ``continue``: (default) Don't overwrite existing output. * ``overwrite``: Overwrite existing output. zoom : list or integer process zoom level or a pair of minimum and maximum zoom level bounds : tuple
python
{ "resource": "" }
q257349
_get_zoom_level
validation
def _get_zoom_level(zoom, process): """Determine zoom levels.""" if zoom is None: return reversed(process.config.zoom_levels) if isinstance(zoom, int): return [zoom]
python
{ "resource": "" }
q257350
_process_worker
validation
def _process_worker(process, process_tile): """Worker function running the process.""" logger.debug((process_tile.id, "running on %s" % current_process().name)) # skip execution if overwrite is disabled and tile exists if ( process.config.mode == "continue" and process.config.output.tiles_exist(process_tile) ): logger.debug((process_tile.id, "tile exists, skipping")) return ProcessInfo( tile=process_tile, processed=False, process_msg="output already exists", written=False, write_msg="nothing written" ) # execute on process tile else: with Timer() as t: try: output = process.execute(process_tile, raise_nodata=True) except MapcheteNodataTile: output = None
python
{ "resource": "" }
q257351
Mapchete.get_process_tiles
validation
def get_process_tiles(self, zoom=None): """ Yield process tiles. Tiles intersecting with the input data bounding boxes as well as process bounds, if provided, are considered process tiles. This is to avoid iterating through empty tiles. Parameters ---------- zoom : integer zoom level process tiles should be returned from; if none is given, return all process tiles yields ------ BufferedTile objects """ if zoom or zoom == 0:
python
{ "resource": "" }
q257352
Mapchete.batch_process
validation
def batch_process( self, zoom=None, tile=None, multi=cpu_count(), max_chunksize=1 ): """ Process a large batch of tiles. Parameters ---------- process : MapcheteProcess process to be run zoom : list or int either single zoom level or list of minimum and maximum zoom level; None processes all (default: None) tile : tuple zoom, row and column of tile to be processed (cannot be used with zoom)
python
{ "resource": "" }
q257353
Mapchete.batch_processor
validation
def batch_processor( self, zoom=None, tile=None, multi=cpu_count(), max_chunksize=1 ): """ Process a large batch of tiles and yield report messages per tile. Parameters ---------- zoom : list or int either single zoom level or list of minimum and maximum zoom level; None processes all (default: None) tile : tuple zoom, row and column of tile to be processed (cannot be used with zoom) multi : int number of workers (default: number of CPU cores) max_chunksize : int maximum number of process tiles to be queued for each worker; (default: 1) """ if zoom and tile: raise ValueError("use either zoom or tile") # run single tile if tile: yield _run_on_single_tile(self, tile) # run concurrently
python
{ "resource": "" }
q257354
Mapchete.execute
validation
def execute(self, process_tile, raise_nodata=False): """ Run the Mapchete process. Execute, write and return data. Parameters ---------- process_tile : Tile or tile index tuple Member of the process tile pyramid (not necessarily the output pyramid, if output has a different metatiling setting) Returns ------- data : NumPy array or features process output """ if self.config.mode not in ["memory", "continue", "overwrite"]: raise ValueError("process mode must be memory, continue or overwrite") if isinstance(process_tile, tuple):
python
{ "resource": "" }
q257355
Mapchete.read
validation
def read(self, output_tile): """ Read from written process output. Parameters ---------- output_tile : BufferedTile or tile index tuple Member of the output tile pyramid (not necessarily the process pyramid, if output has a different metatiling setting) Returns ------- data : NumPy array or features process output """ if self.config.mode not in ["readonly", "continue", "overwrite"]: raise ValueError("process mode must be readonly, continue or overwrite") if
python
{ "resource": "" }
q257356
Mapchete.write
validation
def write(self, process_tile, data): """ Write data into output format. Parameters ---------- process_tile : BufferedTile or tile index tuple process tile data : NumPy array or features data to be written """ if isinstance(process_tile, tuple): process_tile = self.config.process_pyramid.tile(*process_tile) elif not isinstance(process_tile, BufferedTile): raise ValueError("invalid process_tile type: %s" % type(process_tile)) if self.config.mode not in ["continue", "overwrite"]: raise ValueError("cannot write output in current process mode") if self.config.mode == "continue" and ( self.config.output.tiles_exist(process_tile) ): message = "output exists, not overwritten" logger.debug((process_tile.id, message)) return ProcessInfo( tile=process_tile, processed=False, process_msg=None, written=False, write_msg=message
python
{ "resource": "" }
q257357
Mapchete.get_raw_output
validation
def get_raw_output(self, tile, _baselevel_readonly=False): """ Get output raw data. This function won't work with multiprocessing, as it uses the ``threading.Lock()`` class. Parameters ---------- tile : tuple, Tile or BufferedTile If a tile index is given, a tile from the output pyramid will be assumed. Tile cannot be bigger than process tile! Returns ------- data : NumPy array or features process output """ if not isinstance(tile, (BufferedTile, tuple)): raise TypeError("'tile' must be a tuple or BufferedTile") if isinstance(tile, tuple): tile = self.config.output_pyramid.tile(*tile) if _baselevel_readonly: tile = self.config.baselevels["tile_pyramid"].tile(*tile.id) # Return empty data if zoom level is outside of process zoom levels. if tile.zoom not in self.config.zoom_levels: return self.config.output.empty(tile) # TODO implement reprojection if tile.crs != self.config.process_pyramid.crs: raise NotImplementedError(
python
{ "resource": "" }
q257358
Mapchete._extract
validation
def _extract(self, in_tile=None, in_data=None, out_tile=None): """Extract data from tile.""" return self.config.output.extract_subset(
python
{ "resource": "" }
q257359
MapcheteProcess.read
validation
def read(self, **kwargs): """ Read existing output data from a previous run. Returns ------- process output : NumPy array (raster) or feature iterator (vector) """ if self.tile.pixelbuffer > self.config.output.pixelbuffer: output_tiles = list(self.config.output_pyramid.tiles_from_bounds(
python
{ "resource": "" }
q257360
MapcheteProcess.open
validation
def open(self, input_id, **kwargs): """ Open input data. Parameters ---------- input_id : string input identifier from configuration file or file path kwargs : driver specific parameters (e.g. resampling) Returns ------- tiled input data : InputTile reprojected input data within tile """ if not isinstance(input_id, str): return
python
{ "resource": "" }
q257361
MapcheteProcess.hillshade
validation
def hillshade( self, elevation, azimuth=315.0, altitude=45.0, z=1.0, scale=1.0 ): """ Calculate hillshading from elevation data. Parameters ---------- elevation : array input elevation data azimuth : float horizontal angle of light source (315: North-West) altitude : float vertical angle of light source (90 would result in slope shading) z : float vertical exaggeration factor scale : float
python
{ "resource": "" }
q257362
MapcheteProcess.contours
validation
def contours( self, elevation, interval=100, field='elev', base=0 ): """ Extract contour lines from elevation data. Parameters ---------- elevation : array input elevation data interval : integer elevation value interval when drawing contour lines field : string output field name containing elevation value base : integer elevation base value the intervals are computed from Returns
python
{ "resource": "" }
q257363
MapcheteProcess.clip
validation
def clip( self, array, geometries, inverted=False, clip_buffer=0 ): """ Clip array by geometry. Parameters ---------- array : array raster data to be clipped geometries : iterable geometries used to clip source array inverted : bool invert clipping (default: False) clip_buffer : int buffer (in pixels) geometries before applying clip Returns
python
{ "resource": "" }
q257364
clip_array_with_vector
validation
def clip_array_with_vector( array, array_affine, geometries, inverted=False, clip_buffer=0 ): """ Clip input array with a vector list. Parameters ---------- array : array input raster data array_affine : Affine Affine object describing the raster's geolocation geometries : iterable iterable of dictionaries, where every entry has a 'geometry' and 'properties' key. inverted : bool invert clip (default: False) clip_buffer : integer buffer (in pixels) geometries before clipping Returns ------- clipped array : array """ # buffer input geometries and clean up buffered_geometries = [] for feature in geometries: feature_geom = to_shape(feature["geometry"]) if feature_geom.is_empty: continue if feature_geom.geom_type == "GeometryCollection": # for GeometryCollections apply buffer to every subgeometry # and make union buffered_geom = unary_union([ g.buffer(clip_buffer) for g in feature_geom]) else: buffered_geom = feature_geom.buffer(clip_buffer) if not buffered_geom.is_empty: buffered_geometries.append(buffered_geom)
python
{ "resource": "" }
q257365
pyramid
validation
def pyramid( input_raster, output_dir, pyramid_type=None, output_format=None, resampling_method=None, scale_method=None, zoom=None, bounds=None, overwrite=False, debug=False ): """Create tile pyramid out of input raster.""" bounds = bounds if bounds else None options = dict( pyramid_type=pyramid_type,
python
{ "resource": "" }
q257366
raster2pyramid
validation
def raster2pyramid(input_file, output_dir, options): """Create a tile pyramid out of an input raster dataset.""" pyramid_type = options["pyramid_type"] scale_method = options["scale_method"] output_format = options["output_format"] resampling = options["resampling"] zoom = options["zoom"] bounds = options["bounds"] mode = "overwrite" if options["overwrite"] else "continue" # Prepare process parameters minzoom, maxzoom = _get_zoom(zoom, input_file, pyramid_type) with rasterio.open(input_file, "r") as input_raster: output_bands = input_raster.count input_dtype = input_raster.dtypes[0] output_dtype = input_raster.dtypes[0] nodataval = input_raster.nodatavals[0] nodataval = nodataval if nodataval else 0 if output_format == "PNG" and output_bands > 3: output_bands = 3 output_dtype = 'uint8' scales_minmax = () if scale_method == "dtype_scale": for index in range(1, output_bands+1): scales_minmax += (DTYPE_RANGES[input_dtype], )
python
{ "resource": "" }
q257367
_get_zoom
validation
def _get_zoom(zoom, input_raster, pyramid_type): """Determine minimum and maximum zoomlevel.""" if not zoom: minzoom = 1 maxzoom = get_best_zoom_level(input_raster, pyramid_type) elif len(zoom) == 1: minzoom = zoom[0] maxzoom = zoom[0] elif len(zoom)
python
{ "resource": "" }
q257368
validate_values
validation
def validate_values(config, values): """ Validate whether value is found in config and has the right type. Parameters ---------- config : dict configuration dictionary values : list list of (str, type) tuples of values and value types expected in config Returns -------
python
{ "resource": "" }
q257369
get_hash
validation
def get_hash(x): """Return hash of x.""" if isinstance(x, str):
python
{ "resource": "" }
q257370
get_zoom_levels
validation
def get_zoom_levels(process_zoom_levels=None, init_zoom_levels=None): """Validate and return zoom levels.""" process_zoom_levels = _validate_zooms(process_zoom_levels) if init_zoom_levels is None: return process_zoom_levels else: init_zoom_levels = _validate_zooms(init_zoom_levels) if not
python
{ "resource": "" }
q257371
snap_bounds
validation
def snap_bounds(bounds=None, pyramid=None, zoom=None): """ Snaps bounds to tiles boundaries of specific zoom level. Parameters ---------- bounds : bounds to be snapped pyramid : TilePyramid zoom : int Returns ------- Bounds(left, bottom, right, top) """ if not isinstance(bounds, (tuple, list)): raise TypeError("bounds must be either a tuple or a list") if len(bounds) != 4: raise ValueError("bounds has to have exactly four values")
python
{ "resource": "" }
q257372
clip_bounds
validation
def clip_bounds(bounds=None, clip=None): """ Clips bounds by clip. Parameters ---------- bounds : bounds to be clipped clip : clip bounds Returns ------- Bounds(left, bottom, right, top) """ bounds = Bounds(*bounds) clip = Bounds(*clip) return Bounds(
python
{ "resource": "" }
q257373
_validate_zooms
validation
def _validate_zooms(zooms): """ Return a list of zoom levels. Following inputs are converted: - int --> [int] - dict{min, max} --> range(min, max + 1) - [int] --> [int] - [int, int] --> range(smaller int, bigger int + 1) """ if isinstance(zooms, dict): if any([a not in zooms for a in ["min", "max"]]): raise MapcheteConfigError("min and max zoom required") zmin = _validate_zoom(zooms["min"]) zmax = _validate_zoom(zooms["max"]) if zmin > zmax: raise MapcheteConfigError( "max zoom must not be smaller than
python
{ "resource": "" }
q257374
_raw_at_zoom
validation
def _raw_at_zoom(config, zooms): """Return parameter dictionary per zoom level.""" params_per_zoom = {} for zoom in zooms: params = {} for name, element in config.items(): if name not in _RESERVED_PARAMETERS: out_element = _element_at_zoom(name,
python
{ "resource": "" }
q257375
_element_at_zoom
validation
def _element_at_zoom(name, element, zoom): """ Return the element filtered by zoom level. - An input integer or float gets returned as is. - An input string is checked whether it starts with "zoom". Then, the provided zoom level gets parsed and compared with the actual zoom level. If zoom levels match, the element gets returned. TODOs/gotchas: - Elements are unordered, which can lead to unexpected results when defining the YAML config. - Provided zoom levels for one element in config file are not allowed to "overlap", i.e. there is not yet a decision mechanism implemented which handles this case. """ # If element is a dictionary, analyze subitems. if isinstance(element, dict): if "format" in element: # we have an input or output driver here return element out_elements = {} for sub_name, sub_element in element.items(): out_element = _element_at_zoom(sub_name, sub_element, zoom) if name == "input": out_elements[sub_name] = out_element elif out_element is not None:
python
{ "resource": "" }
q257376
_filter_by_zoom
validation
def _filter_by_zoom(element=None, conf_string=None, zoom=None): """Return element only if zoom condition matches with config string.""" for op_str, op_func in [ # order of operators is important: # prematurely return in cases of "<=" or ">=", otherwise # _strip_zoom() cannot parse config strings starting with "<" # or ">"
python
{ "resource": "" }
q257377
_strip_zoom
validation
def _strip_zoom(input_string, strip_string): """Return zoom level as integer or throw error.""" try:
python
{ "resource": "" }
q257378
_flatten_tree
validation
def _flatten_tree(tree, old_path=None): """Flatten dict tree into dictionary where keys are paths of old dict.""" flat_tree = [] for key, value in tree.items(): new_path = "/".join([old_path,
python
{ "resource": "" }
q257379
_unflatten_tree
validation
def _unflatten_tree(flat): """Reverse tree flattening.""" tree = {} for key, value in flat.items(): path = key.split("/") # we are at the end of a branch if len(path) == 1: tree[key] = value # there are more branches else: # create new dict if not path[0] in tree: tree[path[0]] = _unflatten_tree({"/".join(path[1:]): value}) # add keys to existing dict else:
python
{ "resource": "" }
q257380
MapcheteConfig.bounds
validation
def bounds(self): """Process bounds as defined in the configuration.""" if self._raw["bounds"] is None: return self.process_pyramid.bounds
python
{ "resource": "" }
q257381
MapcheteConfig.init_bounds
validation
def init_bounds(self): """ Process bounds this process is currently initialized with. This gets triggered by using the ``init_bounds`` kwarg. If not set, it will be equal to self.bounds. """ if
python
{ "resource": "" }
q257382
MapcheteConfig.effective_bounds
validation
def effective_bounds(self): """ Effective process bounds required to initialize inputs. Process bounds sometimes have to be larger, because all intersecting process tiles have to be covered as well. """ return snap_bounds( bounds=clip_bounds(bounds=self.init_bounds, clip=self.process_pyramid.bounds),
python
{ "resource": "" }
q257383
MapcheteConfig.output
validation
def output(self): """Output object of driver.""" output_params = dict( self._raw["output"], grid=self.output_pyramid.grid, pixelbuffer=self.output_pyramid.pixelbuffer, metatiling=self.output_pyramid.metatiling ) if "path" in output_params: output_params.update( path=absolute_path(path=output_params["path"], base_dir=self.config_dir) ) if "format" not in output_params:
python
{ "resource": "" }
q257384
MapcheteConfig.input
validation
def input(self): """ Input items used for process stored in a dictionary. Keys are the hashes of the input parameters, values the respective InputData classes. """ # the delimiters are used by some input drivers delimiters = dict( zoom=self.init_zoom_levels, bounds=self.init_bounds, process_bounds=self.bounds, effective_bounds=self.effective_bounds ) # get input items only of initialized zoom levels raw_inputs = { # convert input definition to hash get_hash(v): v for zoom in self.init_zoom_levels if "input" in self._params_at_zoom[zoom] # to preserve file groups, "flatten" the input tree and use # the tree paths as keys for key, v in _flatten_tree(self._params_at_zoom[zoom]["input"]) if v is not None } initalized_inputs = {} for k, v in raw_inputs.items(): # for files and tile directories if isinstance(v, str): logger.debug("load input reader for simple input %s", v) try: reader = load_input_reader( dict( path=absolute_path(path=v, base_dir=self.config_dir), pyramid=self.process_pyramid, pixelbuffer=self.process_pyramid.pixelbuffer, delimiters=delimiters ), readonly=self.mode == "readonly") except Exception as e: logger.exception(e) raise MapcheteDriverError("error when loading input %s: %s" % (v, e))
python
{ "resource": "" }
q257385
MapcheteConfig.baselevels
validation
def baselevels(self): """ Optional baselevels configuration. baselevels: min: <zoom> max: <zoom> lower: <resampling method> higher: <resampling method> """ if "baselevels" not in self._raw: return {} baselevels = self._raw["baselevels"] minmax = {k: v for k, v in baselevels.items() if k in ["min", "max"]} if not minmax: raise MapcheteConfigError("no min and max values given for baselevels") for v in minmax.values(): if not isinstance(v, int) or v < 0: raise MapcheteConfigError( "invalid baselevel zoom parameter given: %s" % minmax.values() ) zooms = list(range( minmax.get("min", min(self.zoom_levels)), minmax.get("max", max(self.zoom_levels)) + 1) ) if not set(self.zoom_levels).difference(set(zooms)):
python
{ "resource": "" }
q257386
MapcheteConfig.params_at_zoom
validation
def params_at_zoom(self, zoom): """ Return configuration parameters snapshot for zoom as dictionary. Parameters ---------- zoom : int zoom level Returns ------- configuration snapshot : dictionary zoom level dependent process configuration """ if zoom not in self.init_zoom_levels: raise ValueError( "zoom level not available with current configuration") out = dict(self._params_at_zoom[zoom], input={}, output=self.output) if "input" in self._params_at_zoom[zoom]: flat_inputs = {}
python
{ "resource": "" }
q257387
MapcheteConfig.area_at_zoom
validation
def area_at_zoom(self, zoom=None): """ Return process bounding box for zoom level. Parameters ---------- zoom : int or None if None, the union of all zoom level areas is returned Returns ------- process area : shapely geometry """ if zoom is None: if not self._cache_full_process_area: logger.debug("calculate process area ...") self._cache_full_process_area = cascaded_union([ self._area_at_zoom(z) for z in self.init_zoom_levels]
python
{ "resource": "" }
q257388
MapcheteConfig.bounds_at_zoom
validation
def bounds_at_zoom(self, zoom=None): """ Return process bounds for zoom level. Parameters ---------- zoom : integer or list Returns ------- process bounds : tuple
python
{ "resource": "" }
q257389
zoom_index_gen
validation
def zoom_index_gen( mp=None, out_dir=None, zoom=None, geojson=False, gpkg=False, shapefile=False, txt=False, vrt=False, fieldname="location", basepath=None, for_gdal=True, threading=False, ): """ Generate indexes for given zoom level. Parameters ---------- mp : Mapchete object process output to be indexed out_dir : path optionally override process output directory zoom : int zoom level to be processed geojson : bool generate GeoJSON index (default: False) gpkg : bool generate GeoPackage index (default: False) shapefile : bool generate Shapefile index (default: False) txt : bool generate tile path list textfile (default: False) vrt : bool GDAL-style VRT file (default: False) fieldname : str field name which contains paths of tiles (default: "location") basepath : str if set, use custom base path instead of output path for_gdal : bool use GDAL compatible remote paths, i.e. add "/vsicurl/" before path (default: True) """ for zoom in get_zoom_levels(process_zoom_levels=zoom): with ExitStack() as es: # get index writers for all enabled formats index_writers = [] if geojson: index_writers.append( es.enter_context( VectorFileWriter( driver="GeoJSON", out_path=_index_file_path(out_dir, zoom, "geojson"), crs=mp.config.output_pyramid.crs, fieldname=fieldname ) ) ) if gpkg: index_writers.append( es.enter_context( VectorFileWriter( driver="GPKG", out_path=_index_file_path(out_dir, zoom, "gpkg"), crs=mp.config.output_pyramid.crs, fieldname=fieldname ) ) ) if shapefile: index_writers.append( es.enter_context( VectorFileWriter( driver="ESRI Shapefile", out_path=_index_file_path(out_dir, zoom, "shp"), crs=mp.config.output_pyramid.crs, fieldname=fieldname ) ) ) if txt: index_writers.append( es.enter_context( TextFileWriter(out_path=_index_file_path(out_dir, zoom, "txt")) ) ) if vrt: index_writers.append( es.enter_context( VRTFileWriter(
python
{ "resource": "" }
q257390
InputData.profile
validation
def profile(self): """Return raster metadata.""" with rasterio.open(self.path, "r")
python
{ "resource": "" }
q257391
execute
validation
def execute(mp): """ Example process for testing. Inputs: ------- file1 raster file Parameters: ----------- Output: ------- np.ndarray """ # Reading and writing data works like this: with mp.open("file1", resampling="bilinear") as raster_file: if raster_file.is_empty(): return
python
{ "resource": "" }
q257392
OutputData.is_valid_with_config
validation
def is_valid_with_config(self, config): """ Check if output format is valid with other process parameters. Parameters ---------- config : dictionary output configuration parameters Returns ------- is_valid : bool """ validate_values(config, [("schema", dict), ("path", str)]) validate_values(config["schema"], [("properties", dict), ("geometry",
python
{ "resource": "" }
q257393
InputTile.read
validation
def read(self, validity_check=True, no_neighbors=False, **kwargs): """ Read data from process output. Parameters ---------- validity_check : bool run geometry validity check (default: True) no_neighbors : bool don't include neighbor tiles if there is a pixelbuffer (default: False) Returns -------
python
{ "resource": "" }
q257394
available_output_formats
validation
def available_output_formats(): """ Return all available output formats. Returns ------- formats : list all available output formats """ output_formats = [] for v in pkg_resources.iter_entry_points(DRIVERS_ENTRY_POINT): driver_ = v.load() if hasattr(driver_, "METADATA") and (
python
{ "resource": "" }
q257395
available_input_formats
validation
def available_input_formats(): """ Return all available input formats. Returns ------- formats : list all available input formats """ input_formats = [] for v in pkg_resources.iter_entry_points(DRIVERS_ENTRY_POINT):
python
{ "resource": "" }
q257396
load_output_writer
validation
def load_output_writer(output_params, readonly=False): """ Return output class of driver. Returns ------- output : ``OutputData`` output writer object """ if not isinstance(output_params, dict): raise TypeError("output_params must be a dictionary") driver_name = output_params["format"]
python
{ "resource": "" }
q257397
load_input_reader
validation
def load_input_reader(input_params, readonly=False): """ Return input class of driver. Returns ------- input_params : ``InputData`` input parameters """ logger.debug("find input reader with params %s", input_params) if not isinstance(input_params, dict): raise TypeError("input_params must be a dictionary") if "abstract" in input_params: driver_name = input_params["abstract"]["format"] elif "path" in input_params: if os.path.splitext(input_params["path"])[1]: input_file = input_params["path"] driver_name = driver_from_file(input_file) else: logger.debug("%s is a directory", input_params["path"]) driver_name =
python
{ "resource": "" }
q257398
driver_from_file
validation
def driver_from_file(input_file): """ Guess driver from file extension. Returns ------- driver : string driver name """ file_ext = os.path.splitext(input_file)[1].split(".")[1] if file_ext not in _file_ext_to_driver(): raise MapcheteDriverError( "no driver could be found for file extension %s" % file_ext
python
{ "resource": "" }
q257399
write_output_metadata
validation
def write_output_metadata(output_params): """Dump output JSON and verify parameters if output metadata exist.""" if "path" in output_params: metadata_path = os.path.join(output_params["path"], "metadata.json") logger.debug("check for output %s", metadata_path) try: existing_params = read_output_metadata(metadata_path) logger.debug("%s exists", metadata_path) logger.debug("existing output parameters: %s", pformat(existing_params)) existing_tp = existing_params["pyramid"] current_params = params_to_dump(output_params) logger.debug("current output parameters: %s", pformat(current_params)) current_tp = BufferedTilePyramid(**current_params["pyramid"]) if existing_tp != current_tp: raise MapcheteConfigError( "pyramid definitions between existing and new output do not match: " "%s != %s" % (existing_tp, current_tp) ) existing_format = existing_params["driver"]["format"]
python
{ "resource": "" }