_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
31
13.1k
language
stringclasses
1 value
meta_information
dict
q6200
BaseBaseModel.find_by_id
train
def find_by_id(self, _id, projection=None): """find record by _id """ if isinstance(_id, list) or isinstance(_id, tuple): return list(self.__collect.find(
python
{ "resource": "" }
q6201
to_list_str
train
def to_list_str(value, encode=None): """recursively convert list content into string :arg list value: The list that need to be converted. :arg function encode: Function used to encode object. """ result = [] for index, v in enumerate(value):
python
{ "resource": "" }
q6202
to_dict_str
train
def to_dict_str(origin_value, encode=None): """recursively convert dict content into string """ value = copy.deepcopy(origin_value) for k, v in value.items(): if isinstance(v, dict): value[k] = to_dict_str(v, encode) continue
python
{ "resource": "" }
q6203
default_encode
train
def default_encode(v): """convert ObjectId, datetime, date into string """ if isinstance(v, ObjectId): return unicode_type(v) if isinstance(v, datetime):
python
{ "resource": "" }
q6204
to_str
train
def to_str(v, encode=None): """convert any list, dict, iterable and primitives object to string """ if isinstance(v, basestring_type): return v
python
{ "resource": "" }
q6205
get_base_dir
train
def get_base_dir(currfile, dir_level_num=3): """ find certain path according to currfile """
python
{ "resource": "" }
q6206
join_sys_path
train
def join_sys_path(currfile, dir_level_num=3): """ find certain path then load into sys path """ if os.path.isdir(currfile): root_path = currfile
python
{ "resource": "" }
q6207
camel_to_underscore
train
def camel_to_underscore(name): """ convert CamelCase style to under_score_case """ as_list = [] length = len(name) for index, i in
python
{ "resource": "" }
q6208
encode_http_params
train
def encode_http_params(**kw): ''' url paremeter encode ''' try: _fo = lambda k, v: '{name}={value}'.format( name=k, value=to_basestring(quote(v))) except: _fo =
python
{ "resource": "" }
q6209
_init_file_logger
train
def _init_file_logger(logger, level, log_path, log_size, log_count): """ one logger only have one level RotatingFileHandler """ if level not in [logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL]:
python
{ "resource": "" }
q6210
Session._processor
train
def _processor(self): """Application processor to setup session for every request"""
python
{ "resource": "" }
q6211
Session._load
train
def _load(self): """Load the session from the store, by the id from cookie""" self.session_id = self._session_object.get_session_id() # protection against session_id tampering if self.session_id and not self._valid_session_id(self.session_id): self.session_id = None if self.session_id: d = self.store[self.session_id] if isinstance(d, dict) and d: self.update(d) if not self.session_id:
python
{ "resource": "" }
q6212
Store.encode
train
def encode(self, session_data): """encodes session dict as a string""" pickled
python
{ "resource": "" }
q6213
Store.decode
train
def decode(self, session_data): """decodes the data to get back the session dict """
python
{ "resource": "" }
q6214
cli
train
def cli(): """An improved shell command, based on konch.""" from flask.globals import _app_ctx_stack app = _app_ctx_stack.top.app options = {key: app.config.get(key, DEFAULTS[key]) for key in DEFAULTS.keys()} base_context = {"app": app} if options["KONCH_FLASK_IMPORTS"]: base_context.update(get_flask_imports()) context = dict(base_context) if options["KONCH_FLASK_SHELL_CONTEXT"]: flask_context = app.make_shell_context() context.update(flask_context) context.update(options["KONCH_CONTEXT"]) def context_formatter(ctx): formatted_base = ", ".join(sorted(base_context.keys())) ret = "\n{FLASK}\n{base_context}\n".format( FLASK=click.style("Flask:", bold=True), base_context=formatted_base ) if options["KONCH_FLASK_SHELL_CONTEXT"]: variables = ", ".join(sorted(flask_context.keys())) ret += "\n{ADDITIONAL}\n{variables}\n".format( ADDITIONAL=click.style( "Flask shell context (see shell_context_processor()):", bold=True ), variables=variables, ) if options["KONCH_CONTEXT"]: variables = ", ".join(sorted(options["KONCH_CONTEXT"].keys()))
python
{ "resource": "" }
q6215
LazyUUIDTask.replace
train
def replace(self): """ Performs conversion to the regular Task object, referenced by the stored UUID. """
python
{ "resource": "" }
q6216
LazyUUIDTaskSet.replace
train
def replace(self): """ Performs conversion to the regular TaskQuerySet object, referenced by the stored UUIDs. """
python
{ "resource": "" }
q6217
TaskResource._update_data
train
def _update_data(self, data, update_original=False, remove_missing=False): """ Low level update of the internal _data dict. Data which are coming as updates should already be serialized. If update_original is True, the original_data dict is updated as well. """ self._data.update(dict((key, self._deserialize(key, value)) for key, value in data.items())) # In certain situations, we want to treat missing keys as
python
{ "resource": "" }
q6218
TaskResource.export_data
train
def export_data(self): """ Exports current data contained in the Task as JSON """ # We need to remove spaces for TW-1504, use custom separators data_tuples = ((key, self._serialize(key, value)) for key, value in six.iteritems(self._data))
python
{ "resource": "" }
q6219
Task.from_input
train
def from_input(cls, input_file=sys.stdin, modify=None, backend=None): """ Creates a Task object, directly from the stdin, by reading one line. If modify=True, two lines are used, first line interpreted as the original state of the Task object, and second line as its new, modified value. This is consistent with the TaskWarrior's hook system. Object created by this method should not be saved, deleted or refreshed, as t could create a infinite loop. For this reason, TaskWarrior instance is set to None. Input_file argument can be used to specify the input file, but defaults to sys.stdin.
python
{ "resource": "" }
q6220
TaskQuerySet.filter
train
def filter(self, *args, **kwargs): """ Returns a new TaskQuerySet with the given filters added. """ clone = self._clone() for f in args: clone.filter_obj.add_filter(f) for
python
{ "resource": "" }
q6221
CSequenceMatcher.set_seq1
train
def set_seq1(self, a): """Same as SequenceMatcher.set_seq1, but check for non-list inputs implementation.""" if a is self.a: return self.a = a if not isinstance(self.a, list): self.a
python
{ "resource": "" }
q6222
CSequenceMatcher.set_seq2
train
def set_seq2(self, b): """Same as SequenceMatcher.set_seq2, but uses the c chainb implementation. """ if b is self.b and hasattr(self, 'isbjunk'): return self.b = b if not isinstance(self.a, list): self.a = list(self.a) if not isinstance(self.b, list): self.b = list(self.b) # Types must be hashable to work in the c layer. This check lines will # raise the correct error if they are *not* hashable. [hash(x) for x in self.a] [hash(x) for x in self.b]
python
{ "resource": "" }
q6223
CSequenceMatcher.get_matching_blocks
train
def get_matching_blocks(self): """Same as SequenceMatcher.get_matching_blocks, but calls through to a faster loop for find_longest_match. The rest is the same. """ if self.matching_blocks is not None: return self.matching_blocks matching_blocks = _cdifflib.matching_blocks(self)
python
{ "resource": "" }
q6224
_tostream
train
def _tostream(parser, obj, stream, skipprepack = False): """ Compatible to old parsers """ if hasattr(parser, 'tostream'): return parser.tostream(obj, stream, skipprepack) else: data = parser.tobytes(obj, skipprepack) cls = type(parser) if cls not
python
{ "resource": "" }
q6225
_to_str
train
def _to_str(dumped_val, encoding='utf-8', ordered=True): """ Convert bytes in a dump value to str, allowing json encode """ _dict = OrderedDict if ordered else dict if isinstance(dumped_val, dict): return OrderedDict((k, _to_str(v, encoding))
python
{ "resource": "" }
q6226
NamedStruct._unpack
train
def _unpack(self, data): ''' Unpack a struct from bytes. For parser internal use. ''' #self._logger.log(logging.DEBUG, 'unpacking %r', self) current = self while current is not None:
python
{ "resource": "" }
q6227
NamedStruct._prepack
train
def _prepack(self): ''' Prepack stage. For parser internal use. ''' current = self while current is not None: current._parser.prepack(current, skip_self = True) current = getattr(current, '_sub', None) current = self
python
{ "resource": "" }
q6228
NamedStruct._getextra
train
def _getextra(self): ''' Get the extra data of this struct. ''' current = self while hasattr(current, '_sub'):
python
{ "resource": "" }
q6229
Parser.paddingsize2
train
def paddingsize2(self, realsize): ''' Return a padded size from realsize, for NamedStruct internal use.
python
{ "resource": "" }
q6230
typedef.parser
train
def parser(self): ''' Get parser for this type. Create the parser on first call. '''
python
{ "resource": "" }
q6231
enum.formatter
train
def formatter(self, value): ''' Format a enumerate value to enumerate names if possible. Used to generate human readable dump result. ''' if not self._bitwise: n = self.getName(value) if n is None: return value else: return n else: names = [] for k,v in sorted(self._values.items(), key=lambda x: x[1], reverse=True):
python
{ "resource": "" }
q6232
OptionalParser.packto
train
def packto(self, namedstruct, stream): """ Pack a struct to a stream """ if hasattr(namedstruct, self.name):
python
{ "resource": "" }
q6233
Meso._get_response
train
def _get_response(self, endpoint, request_dict): """ Returns a dictionary of data requested by each function. Arguments: ---------- endpoint: string, mandatory Set in all other methods, this is the API endpoint specific to each function. request_dict: string, mandatory A dictionary of parameters that are formatted into the API call. Returns: -------- response: A dictionary that has been dumped from JSON. Raises: ------- MesoPyError: Overrides the exceptions given in the requests library to give more custom error messages. Connection_error occurs if no internet connection exists. Timeout_error occurs if the request takes too long and redirect_error is shown if the url is formatted incorrectly. """ http_error = 'Could not connect to the API. This could be because you have no internet connection, a parameter' \ ' was input incorrectly, or the API is currently down. Please try again.' json_error = 'Could not retrieve JSON values. Try again with a shorter date range.' # For python 3.4
python
{ "resource": "" }
q6234
BaseCollection.avg
train
def avg(self, key=None): """ Get the average value of a given key. :param key: The key to get the average for :type key: mixed :rtype: float or int """
python
{ "resource": "" }
q6235
BaseCollection.diff
train
def diff(self, items): """ Diff the collections with the given items :param items: The items to diff with
python
{ "resource": "" }
q6236
BaseCollection.each
train
def each(self, callback): """ Execute a callback over each item. .. code:: collection = Collection([1, 2, 3]) collection.each(lambda x: x + 3) .. warning:: It only applies the callback but does not modify the collection's items. Use the `transform() <#backpack.Collection.transform>`_ method to modify the
python
{ "resource": "" }
q6237
BaseCollection.every
train
def every(self, step, offset=0): """ Create a new collection consisting of every n-th element. :param step: The step size :type step: int :param offset: The start offset :type offset: int :rtype: Collection """
python
{ "resource": "" }
q6238
BaseCollection.without
train
def without(self, *keys): """ Get all items except for those with the specified keys. :param keys: The keys to remove :type keys: tuple :rtype: Collection """ items = copy(self.items)
python
{ "resource": "" }
q6239
BaseCollection.only
train
def only(self, *keys): """ Get the items with the specified keys. :param keys: The keys to keep :type keys: tuple :rtype: Collection """ items = []
python
{ "resource": "" }
q6240
BaseCollection.filter
train
def filter(self, callback=None): """ Run a filter over each of the items. :param callback: The filter callback :type callback: callable or None :rtype: Collection """ if callback:
python
{ "resource": "" }
q6241
BaseCollection.where
train
def where(self, key, value): """ Filter items by the given key value pair. :param key: The key to filter by :type key: str :param value: The value to filter by :type value: mixed
python
{ "resource": "" }
q6242
BaseCollection.first
train
def first(self, callback=None, default=None): """ Get the first item of the collection. :param default: The default value :type default: mixed """
python
{ "resource": "" }
q6243
BaseCollection.flatten
train
def flatten(self): """ Get a flattened list of the items in the collection. :rtype: Collection """ def _flatten(d): if isinstance(d, dict): for v in d.values(): for nested_v in _flatten(v): yield nested_v elif isinstance(d, list):
python
{ "resource": "" }
q6244
BaseCollection.forget
train
def forget(self, *keys): """ Remove an item from the collection by key. :param keys: The keys to remove :type keys: tuple :rtype: Collection """
python
{ "resource": "" }
q6245
BaseCollection.get
train
def get(self, key, default=None): """ Get an element of the collection. :param key: The index of the element :type key: mixed :param default: The default value to return :type default: mixed
python
{ "resource": "" }
q6246
BaseCollection.implode
train
def implode(self, value, glue=''): """ Concatenate values of a given key as a string. :param value: The value :type value: str :param glue: The glue :type glue: str :rtype: str """
python
{ "resource": "" }
q6247
BaseCollection.last
train
def last(self, callback=None, default=None): """ Get the last item of the collection. :param default: The default value :type default: mixed """
python
{ "resource": "" }
q6248
BaseCollection.pluck
train
def pluck(self, value, key=None): """ Get a list with the values of a given key. :rtype: Collection """ if key:
python
{ "resource": "" }
q6249
BaseCollection.max
train
def max(self, key=None): """ Get the max value of a given key. :param key: The key :type key: str or None :rtype: mixed """ def _max(result, item): val =
python
{ "resource": "" }
q6250
BaseCollection.min
train
def min(self, key=None): """ Get the min value of a given key. :param key: The key :type key: str or None :rtype: mixed """ def _min(result, item): val =
python
{ "resource": "" }
q6251
BaseCollection.for_page
train
def for_page(self, page, per_page): """ "Paginate" the collection by slicing it into a smaller collection. :param page: The current page :type page: int :param per_page: Number of items by slice
python
{ "resource": "" }
q6252
BaseCollection.pull
train
def pull(self, key, default=None): """ Pulls an item from the collection. :param key: The key :type key: mixed :param default: The default value :type default: mixed :rtype:
python
{ "resource": "" }
q6253
BaseCollection.reject
train
def reject(self, callback): """ Create a collection of all elements that do not pass a given truth test. :param callback: The truth test :type callback: callable :rtype: Collection """
python
{ "resource": "" }
q6254
BaseCollection.sort
train
def sort(self, callback=None): """ Sort through each item with a callback. :param callback: The callback :type callback: callable or None :rtype: Collection """ items = self.items
python
{ "resource": "" }
q6255
BaseCollection.sum
train
def sum(self, callback=None): """ Get the sum of the given values. :param callback: The callback :type callback: callable or string or None :rtype: mixed """ if callback is None:
python
{ "resource": "" }
q6256
BaseCollection.zip
train
def zip(self, *items): """ Zip the collection together with one or more arrays. :param items: The items to zip :type items: list
python
{ "resource": "" }
q6257
BaseCollection.merge
train
def merge(self, items): """ Merge the collection with the given items. :param items: The items to merge :type items: list or Collection :rtype: Collection """ if isinstance(items, BaseCollection): items = items.all()
python
{ "resource": "" }
q6258
BaseCollection.transform
train
def transform(self, callback): """ Transform each item in the collection using a callback. :param callback: The callback :type callback: callable
python
{ "resource": "" }
q6259
BaseCollection._value_retriever
train
def _value_retriever(self, value): """ Get a value retrieving callback. :type value: mixed :rtype: callable
python
{ "resource": "" }
q6260
buildvrt
train
def buildvrt(input_file_list, output_file, relative=True, **kwargs): """Build a VRT See also: https://www.gdal.org/gdalbuildvrt.html You can find the possible BuildVRTOptions (**kwargs**) here: https://github.com/nextgis/pygdal/blob/78a793057d2162c292af4f6b240e19da5d5e52e2/2.1.0/osgeo/gdal.py#L1051 Arguments: input_file_list {list of str or Path objects} -- List of input files. output_file {str or Path object} -- Output file (VRT). Keyword Arguments: relative {bool} -- If ``True``, the ``input_file_list`` paths are converted to relative paths (relative to the output file) and the VRT works even if the data is moved somewhere else - given that the relative location of theVRT and the input files does not chance! **kwargs {} -- BuildVRTOptions - see function description for a link to . Returns: [int] -- If successful, 0 is returned as exit code. """ # create destination directory if not Path(output_file).parent.exists(): Path(output_file).parent.mkdir(parents=True, exist_ok=True) # make sure we have absolute paths and strings since BuildVRT does not like something else input_file_list = [str(Path(p).absolute()) for p in input_file_list]
python
{ "resource": "" }
q6261
rasterize
train
def rasterize(src_vector: str, burn_attribute: str, src_raster_template: str, dst_rasterized: str, gdal_dtype: int = 4): """Rasterize the values of a spatial vector file. Arguments: src_vector {str}} -- A OGR vector file (e.g. GeoPackage, ESRI Shapefile) path containing the data to be rasterized. burn_attribute {str} -- The attribute of the vector data to be burned in the raster. src_raster_template {str} -- Path to a GDAL raster file to be used as template for the rasterized data. dst_rasterized {str} -- Path of the destination file. gdal_dtype {int} -- Numeric GDAL data type, defaults to 4 which is UInt32. See https://github.com/mapbox/rasterio/blob/master/rasterio/dtypes.py for useful look-up tables. Returns: None """ data = gdal.Open(str(src_raster_template), # str for the case that a Path instance arrives here gdalconst.GA_ReadOnly) geo_transform = data.GetGeoTransform() #source_layer = data.GetLayer() # x_max = x_min + geo_transform[1] * data.RasterXSize # y_min = y_max + geo_transform[5] * data.RasterYSize x_res = data.RasterXSize y_res = data.RasterYSize mb_v = ogr.Open(src_vector) mb_l = mb_v.GetLayer() target_ds = gdal.GetDriverByName('GTiff').Create(dst_rasterized,
python
{ "resource": "" }
q6262
convert_polygons_to_lines
train
def convert_polygons_to_lines(src_polygons, dst_lines, crs=None, add_allone_col=False): """Convert polygons to lines. Arguments: src_polygons {path to geopandas-readable file} -- Filename of the the polygon vector dataset to be converted to lines. dst_lines {[type]} -- Filename where to write the line vector dataset to. Keyword Arguments: crs {dict or str} -- Output projection parameters as string or in dictionary format. This will reproject the data when a crs is given (not {None}) (default: {None}). add_allone_col {bool} -- Add an additional attribute column with all ones. This is useful, e.g. in case you want to use the lines with gdal_proximity afterwards
python
{ "resource": "" }
q6263
dtype_checker_df
train
def dtype_checker_df(df, dtype, return_=None): """Check if there are NaN values of values outside of a given datatype range. Arguments: df {dataframe} -- A dataframe. dtype {str} -- The datatype to check for. Keyword Arguments: return_ {str} -- Returns a boolean dataframe with the values not in the range of the dtype ('all'), the row ('rowsums') or column ('colsums') sums of that dataframe or an exit code 1 (None, default) if any of the values is not in the range. Returns: [int or DataFrame or Series] -- If no value is out of the range exit code 0 is returned, else depends on return_. """ dtype_range = dtype_ranges[dtype] df_out_of_range = (df < dtype_range[0]) | (df > dtype_range[1]) | (~np.isfinite(df))
python
{ "resource": "" }
q6264
EOCubeChunk._get_spatial_bounds
train
def _get_spatial_bounds(self): """Get the spatial bounds of the chunk.""" # This should be a MultiRasterIO method with rasterio.open(self._mrio._get_template_for_given_resolution(self._mrio.dst_res, "path")) as src_layer: pass # later we need src_layer for src_layer.window_transform(win) win_transform = src_layer.window_transform(self._window)
python
{ "resource": "" }
q6265
EOCubeChunk.robust_data_range
train
def robust_data_range(arr, robust=False, vmin=None, vmax=None): """Get a robust data range, i.e. 2nd and 98th percentile for vmin, vmax parameters.""" # from the seaborn code # https://github.com/mwaskom/seaborn/blob/3a3ec75befab52c02650c62772a90f8c23046038/seaborn/matrix.py#L201 def _get_vmin_vmax(arr2d, vmin=None, vmax=None): if vmin is None: vmin = np.percentile(arr2d, 2) if robust else arr2d.min() if vmax is None: vmax = np.percentile(arr2d, 98) if robust else arr2d.max() return vmin, vmax if len(arr.shape) == 3
python
{ "resource": "" }
q6266
EOCubeChunk.from_eocube
train
def from_eocube(eocube, ji): """Create a EOCubeChunk object from an EOCube object."""
python
{ "resource": "" }
q6267
EOCubeSceneCollection.get_chunk
train
def get_chunk(self, ji): """Get a EOCubeChunk""" return EOCubeSceneCollectionChunk(ji=ji, df_layers=self.df_layers, chunksize=self.chunksize, variables=self.variables,
python
{ "resource": "" }
q6268
get_dataset
train
def get_dataset(dataset="s2l1c"): """Get a specific sampledata to play around. So far the following sampledata exist: * 's2l1c': One Sentinel-2 Level 1C scene with a reference dataset. * 'lsts': A time series of 105 Landsat scenes each with the bands b3 (red), b4 (nir), b5 (swir1) and fmask. Keyword Arguments: dataset {str} -- The name of the dataset (default: {'s2l1c'}). Returns: [dict] -- A dictionary with paths and information about the sampledata. """ if dataset == "s2l1c": search_string = os.path.join(DIR_DATA, dataset, "**", "*_B??.jp2") files = glob.glob(search_string, recursive=True) if not files: raise IOError(f"Could not find raster files of the s2l1c dataset. Search string: {search_string}") basename_splitted = [pth.replace(".jp2", "").split("_")[-2:] for pth in files] dset = {"raster_files": files, "raster_bands": [ele[1] for ele in basename_splitted], "raster_times": [ele[0] for ele in basename_splitted], "vector_file": os.path.join(DIR_DATA, "s2l1c", "s2l1c_ref.gpkg"), "vector_file_osm": os.path.join(DIR_DATA, "s2l1c", "gis_osm_landuse-water_a_free_1_area-10000-to-500000.gpkg")} elif dataset == "lsts": search_string = os.path.join(DIR_DATA, dataset, "**", "*.tif") files = glob.glob(search_string, recursive=True) if not files: raise IOError(f"Could not find raster files of the lsts dataset. Search string: {search_string}") basename_splitted = [os.path.basename(pth).replace(".tif", "").split("_") for
python
{ "resource": "" }
q6269
windows_from_blocksize
train
def windows_from_blocksize(blocksize_xy, width, height): """Create rasterio.windows.Window instances with given size which fully cover a raster. Arguments: blocksize_xy {int or list of two int} -- [description] width {int} -- With of the raster for which to create the windows. height {int} -- Heigth of the raster for which to create the windows. Returns: list -- List of windows according to the following format ``[[<row-index>, <column index>], rasterio.windows.Window(<col_off>, <row_off>, <width>, <height>)]``. """ # checks the blocksize input value_error_msg = "'blocksize must be an integer or a list of two integers.'" if isinstance(blocksize_xy, int): blockxsize, blockysize = (blocksize_xy, blocksize_xy) elif isinstance(blocksize_xy, list): if len(blocksize_xy) != 2: raise ValueError(value_error_msg) else: if not all([isinstance(blocksize_xy[0], int), isinstance(blocksize_xy[1], int)]): raise ValueError(value_error_msg) blockxsize, blockysize = blocksize_xy else: raise ValueError(value_error_msg) # create the col_off and row_off elements for all windows n_cols = int(np.ceil(width / blockxsize)) n_rows = int(np.ceil(height / blockysize)) col = list(range(n_cols)) * n_rows
python
{ "resource": "" }
q6270
MultiRasterIO._get_dst_resolution
train
def _get_dst_resolution(self, dst_res=None): """Get default resolution, i.e. the highest resolution or smallest cell size.""" if dst_res is None:
python
{ "resource": "" }
q6271
MultiRasterIO.windows_from_blocksize
train
def windows_from_blocksize(self, blocksize_xy=512): """Create rasterio.windows.Window instances with given size which fully cover the raster. Arguments: blocksize_xy {int or list of two int} -- Size of the window. If one integer is given it defines the width and height of the window. If a list of two integers if given the first defines the width and the second the height. Returns: None -- But the attributes ``windows``, ``windows_row`` and ``windows_col`` are updated. """ meta = self._get_template_for_given_resolution(self.dst_res, "meta") width = meta["width"]
python
{ "resource": "" }
q6272
MultiRasterIO._resample
train
def _resample(self, arrays, ji_windows): """Resample all arrays with potentially different resolutions to a common resolution.""" # get a destination array template win_dst = ji_windows[self.dst_res] aff_dst = self._layer_meta[self._res_indices[self.dst_res][0]]["transform"] arrays_dst = list() for i, array in enumerate(arrays): arr_dst = np.zeros((int(win_dst.height), int(win_dst.width))) if self._layer_resolution[i] > self.dst_res: resampling = getattr(Resampling, self.upsampler)
python
{ "resource": "" }
q6273
extract
train
def extract(src_vector: str, burn_attribute: str, src_raster: list, dst_names: list, dst_dir: str, src_raster_template: str = None, gdal_dtype: int = 4, n_jobs: int = 1): """Extract values from list of single band raster for pixels overlapping with a vector data. The extracted data will be stored in the ``dst_dir`` by using the ``dst_names`` for the filename. If a file with a given name already exists the raster will be skipped. Arguments: src_vector {str} -- Filename of the vector dataset. Currently it must have the same CRS as the raster. burn_attribute {str} -- Name of the attribute column in the ``src_vector`` dataset to be stored with the extracted data. This should usually be a unique ID for the features (points, lines, polygons) in the vector dataset. src_raster {list} -- List of filenames of the single band raster files from which to extract. dst_names {list} -- List corresponding to ``src_raster`` names used to store and later identify the extracted to. dst_dir {str} -- Directory to store the data to. Keyword Arguments: src_raster_template {str} -- A template raster to be used for rasterizing the vectorfile. Usually the first element of ``src_raster``. (default: {None}) gdal_dtype {int} -- Numeric GDAL data type, defaults to 4 which is UInt32. See https://github.com/mapbox/rasterio/blob/master/rasterio/dtypes.py for useful look-up tables. Returns: [int] -- If successful, 0 is returned as exit code. 
""" if src_raster_template is None: src_raster_template = src_raster[0] path_rasterized = os.path.join(dst_dir, f"burn_attribute_rasterized_{burn_attribute}.tif") paths_extracted_aux = {ele: os.path.join(dst_dir, f"{ele}.npy") \ for ele in [f"aux_vector_{burn_attribute}", "aux_coord_x", "aux_coord_y"]} paths_extracted_raster = {} for path, name in zip(src_raster, dst_names): dst = f"{os.path.join(dst_dir, name)}.npy" if not os.path.exists(dst): paths_extracted_raster[path] = dst if not os.path.exists(dst_dir): os.makedirs(dst_dir) # if it does not already exist, here we first create the rasterized data if not os.path.exists(path_rasterized): if src_raster_template is None: src_raster_template = src_raster[0] # print("Rasterizing vector attribute.") rasterize(src_vector=src_vector, burn_attribute=burn_attribute,
python
{ "resource": "" }
q6274
Plane.extrema
train
def extrema(self, x0, y0, w, h): """ Returns the minimum and maximum values contained in a given area. :param x0: Starting x index. :param y0: Starting y index. :param w: Width of the area to scan. :param h: Height of the area to scan. :return: Tuple containing the minimum and maximum values of the given area. """ minimum = 9223372036854775807 maximum = 0 for y in range(y0, y0 + h):
python
{ "resource": "" }
q6275
Cursebox.set_cursor
train
def set_cursor(self, x, y): """ Sets the cursor to the desired position. :param x: X position
python
{ "resource": "" }
q6276
Cursebox.put
train
def put(self, x, y, text, fg, bg): """ Puts a string at the desired coordinates using the provided colors. :param x: X position :param y: Y position :param text: Text to write :param fg: Foreground color number :param bg: Background color number """ if x < self.width and y < self.height: try:
python
{ "resource": "" }
q6277
Cursebox.poll_event
train
def poll_event(self): """ Waits for an event to happen and returns a string related to the event. If the event is a normal (letter) key press, the letter is returned (case sensitive) :return: Event type """ # Flush all inputs before this one that were done since last poll curses.flushinp() ch = self.screen.getch() if ch == 27: return EVENT_ESC elif ch == -1 or ch == curses.KEY_RESIZE: return EVENT_RESIZE elif ch == 10 or ch == curses.KEY_ENTER: return EVENT_ENTER elif ch == 127 or ch == curses.KEY_BACKSPACE:
python
{ "resource": "" }
q6278
draw_panel
train
def draw_panel(cb, pool, params, plane): """ Draws the application's main panel, displaying the current Mandelbrot view. :param cb: Cursebox instance. :type cb: cursebox.Cursebox :param params: Current application parameters. :type params: params.Params :param plane: Plane containing the current Mandelbrot values. :type plane: plane.Plane """ w = cb.width - MENU_WIDTH - 1 h = cb.height - 1 params.plane_w = w params.plane_h = h params.resize(w, h) palette = PALETTES[params.palette][1] if params.reverse_palette: palette = palette[::-1] # draw_gradient(t, 1, 1, w, h, palette, params.dither_type) generated = 0 missing_coords = [] # Check for coordinates that have no value in current plane xs = range(params.plane_x0, params.plane_x0 + params.plane_w - 1) ys = range(params.plane_y0, params.plane_y0 + params.plane_h - 1) for x in xs: for y in ys: if plane[x, y] is None: missing_coords.append((x, y, params)) generated += 1 # Compute all missing values via multiprocessing n_processes = 0 if len(missing_coords) > 0: n_cores = pool._processes n_processes = len(missing_coords) // 256 if n_processes > n_cores: n_processes = n_cores start = time.time() for i, result in enumerate(pool.imap_unordered(compute, missing_coords, chunksize=256)): plane[result[0], result[1]] = result[2] if time.time() - start > 2: if i % 200 == 0: draw_progress_bar(cb, "Render is taking a longer time...", i, len(missing_coords)) cb.refresh() if generated > 0: params.log("Added %d missing cells" % generated) if n_processes > 1: params.log("(Used %d processes)" % n_processes) min_value = 0.0 max_value = params.max_iterations max_iterations = params.max_iterations if params.adaptive_palette: min_value, max_value = plane.extrema(params.plane_x0, params.plane_y0, params.plane_w, params.plane_h) crosshairs_coord = None if params.crosshairs: crosshairs_coord = params.crosshairs_coord # Draw all values in cursebox for x in
python
{ "resource": "" }
q6279
update_display
train
def update_display(cb, pool, params, plane, qwertz): """ Draws everything. :param cb: Cursebox instance. :type cb: cursebox.Cursebox :param params: Current application parameters. :type params: params.Params :param plane: Plane containing the current Mandelbrot values. :type plane: plane.Plane :return: """ cb.clear()
python
{ "resource": "" }
q6280
save
train
def save(params): """ Saves the current parameters to a file. :param params: Current application parameters. :return: """ if is_python3(): import pickle cPickle = pickle
python
{ "resource": "" }
q6281
capture
train
def capture(cb, pool, params): """ Renders and saves a screen-sized picture of the current position. :param cb: Cursebox instance. :type cb: cursebox.Cursebox :param params: Current application parameters. :type params: params.Params """ w, h = screen_resolution() # Re-adapt dimensions to match current plane ratio old_ratio = w / h new_ratio = params.plane_ratio if old_ratio > new_ratio: w = int(h * new_ratio) else: h = int(w / new_ratio) image = Image.new("RGB", (w, h), "white") pixels = image.load() # FIXME: refactor common code to get_palette(params) palette = PALETTES[params.palette][1] if params.reverse_palette: palette = palette[::-1] # All coordinates to be computed as single arguments for processes coords = [(x, y, w, h, params) for x in range(w) for y in range(h)] results = [] # Dispatch work to pool and draw results as they come in for i, result in enumerate(pool.imap_unordered(compute_capture, coords, chunksize=256)): results.append(result) if i % 2000 == 0: draw_progress_bar(cb, "Capturing current scene...", i, w * h) cb.refresh() min_value = 0.0 max_value = params.max_iterations max_iterations = params.max_iterations if params.adaptive_palette: from operator import itemgetter min_value = min(results, key=itemgetter(2))[2] max_value = max(results, key=itemgetter(2))[2]
python
{ "resource": "" }
q6282
cycle
train
def cycle(cb, pool, params, plane): """ Fun function to do a palette cycling animation. :param cb: Cursebox instance. :type cb: cursebox.Cursebox :param params: Current application parameters. :type params: params.Params :param plane: Plane containing the current Mandelbrot values. :type plane: plane.Plane :return: """ step = params.max_iterations //
python
{ "resource": "" }
q6283
init_coords
train
def init_coords(cb, params): """ Initializes coordinates and zoom for first use. Loads coordinates from Mandelbrot-space. :param cb: Cursebox instance. :type cb: cursebox.Cursebox
python
{ "resource": "" }
q6284
screen_resolution
train
def screen_resolution(): """ Returns the current screen's resolution. Should be multi-platform. :return: A tuple containing the width and height of the screen. """ w = 0 h = 0 try: # Windows import ctypes user32 = ctypes.windll.user32 w, h = user32.GetSystemMetrics(0), user32.GetSystemMetrics(1) except AttributeError: try:
python
{ "resource": "" }
q6285
open_file
train
def open_file(filename): """ Multi-platform way to make the OS open a file with its default application """ if sys.platform.startswith("darwin"): subprocess.call(("open", filename)) elif sys.platform == "cygwin": subprocess.call(("cygstart",
python
{ "resource": "" }
q6286
mandelbrot_iterate
train
def mandelbrot_iterate(c, max_iterations, julia_seed=None): """ Returns the number of iterations before escaping the Mandelbrot fractal. :param c: Coordinates as a complex number :type c: complex :param max_iterations: Limit of how many tries are attempted. :return: Tuple containing the last complex number in the sequence and the number of iterations. """ z =
python
{ "resource": "" }
q6287
mandelbrot
train
def mandelbrot(x, y, params): """ Computes the number of iterations of the given plane-space coordinates. :param x: X coordinate on the plane. :param y: Y coordinate on the plane.
python
{ "resource": "" }
q6288
mandelbrot_capture
train
def mandelbrot_capture(x, y, w, h, params): """ Computes the number of iterations of the given pixel-space coordinates, for high-res capture purposes. Contrary to :func:`mandelbrot`, this function returns a continuous number of iterations to avoid banding. :param x: X coordinate on the picture :param y: Y coordinate on the picture :param w: Width of the picture :param h: Height of the picture :param params: Current application parameters. :type params: params.Params :return: Continuous number of iterations. """ # FIXME: Figure out why these corrections are necessary or how to make
python
{ "resource": "" }
q6289
update_position
train
def update_position(params): """ Computes the center of the viewport's Mandelbrot-space coordinates. :param params: Current application parameters. :type params: params.Params """ cx
python
{ "resource": "" }
q6290
zoom
train
def zoom(params, factor): """ Applies a zoom on the current parameters. Computes the top-left plane-space coordinates from the Mandelbrot-space coordinates. :param params: Current application parameters. :param factor: Zoom factor by which the zoom ratio is divided (bigger factor, more zoom) """ params.zoom /= factor n_x = params.mb_cx / params.zoom
python
{ "resource": "" }
q6291
Params.resize
train
def resize(self, w, h): """ Used when resizing the plane, resets the plane ratio factor. :param w: New width of the visible section of the plane. :param h: New height of the visible section of the plane. """ self.plane_w = w
python
{ "resource": "" }
q6292
check_sender_and_entity_handle_match
train
def check_sender_and_entity_handle_match(sender_handle, entity_handle): """Ensure that sender and entity handles match. Basically we've already verified the sender is who they say when receiving the payload. However, the sender might be trying to set another author in the payload itself, since Diaspora has the sender in both the payload headers AND the object. We must ensure they're the same. """
python
{ "resource": "" }
q6293
transform_attributes
train
def transform_attributes(attrs, cls): """Transform some attribute keys. :param attrs: Properties from the XML :type attrs: dict :param cls: Class of the entity :type cls: class """ transformed = {} for key, value in attrs.items(): if value is None: value = "" if key == "text": transformed["raw_content"] = value elif key == "author": if cls == DiasporaProfile: # Diaspora Profile XML message contains no GUID. We need the guid. Fetch it. profile = retrieve_and_parse_profile(value) transformed['id'] = value transformed["guid"] = profile.guid else: transformed["actor_id"] = value transformed["handle"] = value elif key == 'guid': if cls != DiasporaProfile: transformed["id"] = value transformed["guid"] = value elif key in ("root_author", "recipient"): transformed["target_id"] = value transformed["target_handle"] = value elif key in ("target_guid", "root_guid", "parent_guid"): transformed["target_id"] = value transformed["target_guid"] = value elif key in ("first_name", "last_name"): values = [attrs.get('first_name'), attrs.get('last_name')] values = [v for v in values if v] transformed["name"] = " ".join(values) elif key == "image_url": if "image_urls" not in transformed: transformed["image_urls"] = {} transformed["image_urls"]["large"] = value elif key == "image_url_small": if "image_urls" not in transformed: transformed["image_urls"] = {} transformed["image_urls"]["small"] = value elif key == "image_url_medium": if "image_urls" not in transformed: transformed["image_urls"] = {} transformed["image_urls"]["medium"] = value elif key == "tag_string": if value: transformed["tag_list"] = value.replace("#", "").split(" ")
python
{ "resource": "" }
q6294
get_element_child_info
train
def get_element_child_info(doc, attr): """Get information from child elements of this element as a list since order is important. Don't include signature tags. :param doc: XML element :param attr: Attribute to get from the elements, for example "tag" or "text". """ props = [] for child
python
{ "resource": "" }
q6295
rfc7033_webfinger_view
train
def rfc7033_webfinger_view(request, *args, **kwargs): """ Django view to generate an RFC7033 webfinger. """ resource = request.GET.get("resource") if not resource: return HttpResponseBadRequest("No resource found") if not resource.startswith("acct:"): return HttpResponseBadRequest("Invalid resource") handle = resource.replace("acct:", "").lower() profile_func = get_function_from_config("get_profile_function") try: profile = profile_func(handle=handle, request=request) except Exception as exc: logger.warning("rfc7033_webfinger_view - Failed to get profile by handle %s: %s", handle, exc) return HttpResponseNotFound() config = get_configuration() webfinger = RFC7033Webfinger( id=profile.id,
python
{ "resource": "" }
q6296
retrieve_diaspora_hcard
train
def retrieve_diaspora_hcard(handle): """ Retrieve a remote Diaspora hCard document. :arg handle: Remote handle to retrieve :return: str (HTML document) """ webfinger = retrieve_and_parse_diaspora_webfinger(handle)
python
{ "resource": "" }
q6297
retrieve_and_parse_diaspora_webfinger
train
def retrieve_and_parse_diaspora_webfinger(handle): """ Retrieve and parse a remote Diaspora webfinger document. :arg handle: Remote handle to retrieve :returns: dict """ try: host = handle.split("@")[1] except AttributeError: logger.warning("retrieve_and_parse_diaspora_webfinger: invalid handle given: %s", handle) return None document, code, exception = fetch_document( host=host, path="/.well-known/webfinger?resource=acct:%s" % quote(handle), )
python
{ "resource": "" }
q6298
retrieve_diaspora_host_meta
train
def retrieve_diaspora_host_meta(host): """ Retrieve a remote Diaspora host-meta document. :arg host: Host to retrieve from :returns: ``XRD``
python
{ "resource": "" }
q6299
_get_element_text_or_none
train
def _get_element_text_or_none(document, selector): """ Using a CSS selector, get the element and return the text, or None if no element. :arg document: ``HTMLElement`` document :arg selector: CSS selector
python
{ "resource": "" }