Column            Type            Values
_id               stringlengths   2 to 7 characters
title             stringlengths   1 to 88 characters
partition         stringclasses   3 values
text              stringlengths   31 to 13.1k characters
language          stringclasses   1 value
meta_information  dict
q14100
ColorCode._init_hex
train
def _init_hex(self, hexval: str) -> None: """ Initialize from a hex value string. """ self.hexval = hex2termhex(fix_hex(hexval))
python
{ "resource": "" }
q14101
ColorCode._init_rgb
train
def _init_rgb(self, r: int, g: int, b: int) -> None: """ Initialize from red, green, blue args. """ if self.rgb_mode: self.rgb = (r, g, b) self.hexval = rgb2hex(r, g, b) else:
python
{ "resource": "" }
q14102
ColorCode.from_code
train
def from_code(cls, code: int) -> 'ColorCode': """ Return a ColorCode
python
{ "resource": "" }
q14103
ColorCode.from_hex
train
def from_hex(cls, hexval: str) -> 'ColorCode': """ Return a
python
{ "resource": "" }
q14104
ColorCode.from_rgb
train
def from_rgb(cls, r: int, g: int, b: int) -> 'ColorCode': """ Return
python
{ "resource": "" }
q14105
commit
train
def commit(func): '''Used as a decorator for automatically making session commits''' def wrap(**kwarg): with session_withcommit() as session: a = func(**kwarg) session.add(a)
python
{ "resource": "" }
q14106
get_hub
train
def get_hub(): """Return the instance of the hub.""" try: hub = _local.hub except AttributeError: # The Hub can only be instantiated from the root fiber. No other fibers # can run until the Hub is there, so the root will always be the first
python
{ "resource": "" }
q14107
switch_back.switch
train
def switch(self, value=None): """Switch back to the origin fiber. The fiber is switched in the next time the event loop runs."""
python
{ "resource": "" }
q14108
switch_back.throw
train
def throw(self, typ, val=None, tb=None): """Throw an exception into the origin fiber. The exception is thrown the next time the event loop runs.""" # This might seem redundant with self._fiber.cancel(exc), but it isn't # as self._fiber might be a "raw" fibers.Fiber() that doesn't have a # cancel() method.
python
{ "resource": "" }
q14109
Hub.close
train
def close(self): """Close the hub and wait for it to be closed. This may only be called in the root fiber. After this call returned, Gruvi cannot be used anymore in the current thread. The main use case for calling this method is to clean up resources in a multi-threaded program where you want to exit a thead but not yet the entire process. """ if self._loop is None: return if fibers.current().parent is not None: raise RuntimeError('close() may only be called in the root fiber') elif compat.get_thread_ident() != self._thread:
python
{ "resource": "" }
q14110
Hub.switch
train
def switch(self): """Switch to the hub. This method pauses the current fiber and runs the event loop. The caller should ensure that it has set up appropriate callbacks so that it will get scheduled again, preferably using :class:`switch_back`. In this case then return value of this method will be an ``(args, kwargs)`` tuple containing the arguments passed to the switch back instance. If this method is called from the root fiber then there are two additional cases. If the hub exited due to a call to :meth:`close`, then this method returns None. And if the hub exited due to a
python
{ "resource": "" }
q14111
Hub._run_callbacks
train
def _run_callbacks(self): """Run registered callbacks.""" for i in range(len(self._callbacks)): callback, args = self._callbacks.popleft() try: callback(*args)
python
{ "resource": "" }
q14112
Hub.run_callback
train
def run_callback(self, callback, *args): """Queue a callback. The *callback* will be called with positional arguments *args* in the next iteration of the event loop. If you add multiple callbacks, they will be called in the order that you added them. The callback will run in the Hub's fiber. This method is thread-safe: it is allowed to queue a callback from a different thread than the one running the Hub. """ if self._loop is None:
python
{ "resource": "" }
q14113
message_info
train
def message_info(message): """Return a string describing a message, for debugging purposes.""" method = message.get('method') msgid = message.get('id') error = message.get('error') if method and msgid is not None: return 'method call "{}", id = "{}"'.format(method, msgid) elif method: return 'notification "{}"'.format(method) elif error is not None and msgid is not None:
python
{ "resource": "" }
q14114
JsonRpcVersion.next_id
train
def next_id(self): """Return a unique message ID.""" msgid
python
{ "resource": "" }
q14115
JsonRpcProtocol.send_message
train
def send_message(self, message): """Send a raw JSON-RPC message. The *message* argument must be a dictionary containing a valid JSON-RPC message according to the version passed into the constructor. """ if self._error: raise
python
{ "resource": "" }
q14116
JsonRpcProtocol.call_method
train
def call_method(self, method, *args): """Call a JSON-RPC method and wait for its result. The *method* is called with positional arguments *args*. On success, the ``result`` field from the JSON-RPC response is returned. On error, a :class:`JsonRpcError` is raised, which you can use to access the ``error`` field of the JSON-RPC response. """ message = self._version.create_request(method, args) msgid = message['id'] try: with switch_back(self._timeout) as switcher: self._method_calls[msgid] = switcher
python
{ "resource": "" }
q14117
JsonRpcProtocol.send_notification
train
def send_notification(self, method, *args): """Send a JSON-RPC notification. The notification *method* is sent with positional arguments *args*. """
python
{ "resource": "" }
q14118
JsonRpcProtocol.send_response
train
def send_response(self, request, result=None, error=None): """Respond to a JSON-RPC method call. This is a response to the message in *request*. If *error* is not provided, then this is a successful response, and the value in *result*, which may be ``None``, is passed back to the client. If *error* is provided and not ``None``
python
{ "resource": "" }
q14119
unmarshal
train
def unmarshal( compoundSignature, data, offset = 0, lendian = True ): """ Unmarshals DBus encoded data. @type compoundSignature: C{string} @param compoundSignature: DBus signature specifying the encoded value types @type data: C{string} @param data: Binary data @type offset: C{int} @param offset: Offset within data at which data for compoundSignature starts (used during recursion) @type lendian: C{bool} @param lendian: True if data is encoded in little-endian format @returns: (number_of_bytes_decoded, list_of_values) """ values
python
{ "resource": "" }
q14120
spawn
train
def spawn(func, *args, **kwargs): """Spawn a new fiber. A new :class:`Fiber` is created with main function *func* and positional arguments *args*. The keyword arguments are passed to the :class:`Fiber` constructor, not to the main function. The fiber is then
python
{ "resource": "" }
q14121
Fiber.start
train
def start(self): """Schedule the fiber to be started in the next iteration of the event loop.""" target = getattr(self._target, '__qualname__', self._target.__name__)
python
{ "resource": "" }
q14122
Fiber.cancel
train
def cancel(self, message=None): """Schedule the fiber to be cancelled in the next iteration of the event loop. Cancellation works by throwing a :class:`~gruvi.Cancelled` exception into the fiber. If *message* is provided, it will be set as the value of the exception. """
python
{ "resource": "" }
q14123
MetaMusic.align_matches
train
def align_matches(self, matches: list)->Optional[dict]: """ Finds hash matches that align in time with other matches and finds consensus about which hashes are "true" signal from the audio. Returns a dictionary with match information. """ # align by diffs diff_counter: dict = {} largest = 0 largest_count = 0 song_id = -1 for sid, diff in matches: if diff not in diff_counter: diff_counter[diff] = {} if sid not in diff_counter[diff]: diff_counter[diff][sid] = 0 diff_counter[diff][sid] += 1 if diff_counter[diff][sid] > largest_count: largest = diff largest_count = diff_counter[diff][sid] song_id = sid # extract identification song = database.get_song_by_id(song_id) if song: songname = song.song_name
python
{ "resource": "" }
q14124
saddr
train
def saddr(address): """Return a string representation for an address. The *address* parameter can be a pipe name, an IP address tuple, or a socket address. The return value is always a ``str`` instance. """ if isinstance(address, six.string_types): return address elif isinstance(address, tuple) and len(address) >= 2 and ':' in address[0]: return '[{}]:{}'.format(address[0],
python
{ "resource": "" }
q14125
paddr
train
def paddr(address): """Parse a string representation of an address. This function is the inverse of :func:`saddr`. """ if not isinstance(address, six.string_types): raise TypeError('expecting a string') if address.startswith('['): p1 = address.find(']:') if p1 == -1:
python
{ "resource": "" }
q14126
get_codes
train
def get_codes(s: Union[str, 'ChainedBase']) -> List[str]: """ Grab all escape codes
python
{ "resource": "" }
q14127
get_indices
train
def get_indices(s: Union[str, 'ChainedBase']) -> Dict[int, str]: """ Retrieve a dict of characters and escape codes with their real index into the string as the key. """ codes = get_code_indices(s) if not codes: # This function is not for non-escape-code stuff, but okay. return {i: c for i, c in enumerate(s)} indices = {} for codeindex in sorted(codes): code = codes[codeindex] if codeindex == 0: indices[codeindex] = code continue # Grab characters before codeindex. start = max(indices or {0: ''}, key=int) startcode = indices.get(start, '')
python
{ "resource": "" }
q14128
MultiPoll.remove_callback
train
def remove_callback(self, handle): """Remove a callback.""" if self._poll is None: raise RuntimeError('poll instance is closed') remove_callback(self, handle) if handle.extra & READABLE:
python
{ "resource": "" }
q14129
MultiPoll.update_callback
train
def update_callback(self, handle, events): """Update the event mask for a callback.""" if self._poll is None: raise RuntimeError('poll instance is closed') if not has_callback(self, handle): raise ValueError('no such callback') if events & ~(READABLE|WRITABLE): raise ValueError('illegal event mask: {}'.format(events)) if handle.extra == events: return if handle.extra & READABLE: self._readers -= 1
python
{ "resource": "" }
q14130
MultiPoll.close
train
def close(self): """Close the poll instance.""" if self._poll is None: return
python
{ "resource": "" }
q14131
Poller.update_callback
train
def update_callback(self, fd, handle, events): """Update the event mask associated with an existing callback. If you want to temporarily disable a callback then you can use this method with an *events* argument of ``0``. This is more efficient than removing the callback and adding it again later. """
python
{ "resource": "" }
q14132
Poller.close
train
def close(self): """Close all active poll instances and remove all callbacks.""" if self._mpoll is None: return
python
{ "resource": "" }
q14133
ClientAuthenticator.authTryNextMethod
train
def authTryNextMethod(self): """ Tries the next authentication method or raises a failure if all mechanisms have been tried. """ if not self.authOrder: raise DBusAuthenticationFailed() self.authMech = self.authOrder.pop() if self.authMech == 'DBUS_COOKIE_SHA1': self.sendAuthMessage('AUTH ' + self.authMech + ' ' +
python
{ "resource": "" }
q14134
ClientAuthenticator._authGetDBusCookie
train
def _authGetDBusCookie(self, cookie_context, cookie_id): """ Reads the requested cookie_id from the cookie_context file """ # XXX Ensure we obtain the correct directory for the # authenticating user and that that user actually # owns the keyrings directory if self.cookie_dir is None: cookie_dir = os.path.expanduser('~/.dbus-keyrings') else: cookie_dir = self.cookie_dir dstat = os.stat(cookie_dir) if dstat.st_mode & 0x36: # 066 raise Exception('User keyrings directory is writeable by other users. Aborting authentication') import pwd if dstat.st_uid != pwd.getpwuid(os.geteuid()).pw_uid:
python
{ "resource": "" }
q14135
add_callback
train
def add_callback(obj, callback, args=()): """Add a callback to an object.""" callbacks = obj._callbacks node = Node(callback, args) # Store a single callback directly in _callbacks if callbacks is None: obj._callbacks = node return node # Otherwise use a dllist.
python
{ "resource": "" }
q14136
remove_callback
train
def remove_callback(obj, handle): """Remove a callback from an object.""" callbacks = obj._callbacks if callbacks is handle: obj._callbacks = None elif
python
{ "resource": "" }
q14137
has_callback
train
def has_callback(obj, handle): """Return whether a callback is currently registered for an object.""" callbacks = obj._callbacks if not callbacks: return
python
{ "resource": "" }
q14138
pop_callback
train
def pop_callback(obj): """Pop a single callback.""" callbacks = obj._callbacks if not callbacks: return if isinstance(callbacks, Node): node = callbacks obj._callbacks = None else:
python
{ "resource": "" }
q14139
clear_callbacks
train
def clear_callbacks(obj): """Remove all callbacks from an object.""" callbacks = obj._callbacks if isinstance(callbacks, dllist): # Help the
python
{ "resource": "" }
q14140
run_callbacks
train
def run_callbacks(obj, log=None): """Run callbacks.""" def run_callback(callback, args):
python
{ "resource": "" }
q14141
get_serializer_class
train
def get_serializer_class(configuration_model): """ Returns a ConfigurationModel serializer class for the supplied configuration_model. """ class AutoConfigModelSerializer(ModelSerializer): """Serializer class for configuration models.""" class Meta(object): """Meta information for AutoConfigModelSerializer.""" model = configuration_model fields = '__all__' def create(self, validated_data): if "changed_by_username" in self.context:
python
{ "resource": "" }
q14142
deserialize_json
train
def deserialize_json(stream, username): """ Given a stream containing JSON, deserializes the JSON into ConfigurationModel instances. The stream is expected to be in the following format: { "model": "config_models.ExampleConfigurationModel", "data": [ { "enabled": True, "color": "black" ... }, { "enabled": False, "color": "yellow" ... }, ... ] } If the provided stream does not contain valid JSON for the ConfigurationModel specified, an Exception will be raised. Arguments: stream: The stream of JSON, as described above. username: The username of the user making the change. This must match an existing user. Returns: the number of created entries
python
{ "resource": "" }
q14143
ConfigurationModelAdmin.get_displayable_field_names
train
def get_displayable_field_names(self): """ Return all field names, excluding reverse foreign key relationships. """ return [ f.name
python
{ "resource": "" }
q14144
ConfigurationModelAdmin.revert
train
def revert(self, request, queryset): """ Admin action to revert a configuration back to the selected value """ if queryset.count() != 1: self.message_user(request, _("Please select a single configuration to revert to.")) return target = queryset[0] target.id = None self.save_model(request, target, None, False) self.message_user(request, _("Reverted configuration."))
python
{ "resource": "" }
q14145
ShowHistoryFilter.choices
train
def choices(self, changelist): """ Returns choices ready to be output in the template. """ show_all = self.used_parameters.get(self.parameter_name) == "1" return ( { 'display': _('Current Configuration'), 'selected': not show_all, 'query_string': changelist.get_query_string({}, [self.parameter_name]), }, {
python
{ "resource": "" }
q14146
KeyedConfigurationModelAdmin.get_queryset
train
def get_queryset(self, request): """ Annotate the queryset with an 'is_active' property that's true iff that row is the most recently added row for that particular set of KEY_FIELDS values. Filter the queryset to show only is_active rows by default.
python
{ "resource": "" }
q14147
KeyedConfigurationModelAdmin.edit_link
train
def edit_link(self, inst): """ Edit link for the change view """ if not inst.is_active: return u'--' update_url = reverse('admin:{}_{}_add'.format(self.model._meta.app_label, self.model._meta.model_name))
python
{ "resource": "" }
q14148
submit_row
train
def submit_row(context): """ Overrides 'django.contrib.admin.templatetags.admin_modify.submit_row'. Manipulates the context going into that function by hiding all of the buttons in the submit row if the key `readonly` is set in the context. """ ctx = original_submit_row(context) if context.get('readonly', False): ctx.update({
python
{ "resource": "" }
q14149
ConfigurationModelManager.current_set
train
def current_set(self): """ A queryset for the active configuration entries only. Only useful if KEY_FIELDS is set. Active means the most recent entries for each unique combination of keys. It does not necessarily mean enabled. """ assert self.model.KEY_FIELDS != (), "Just use model.current() if
python
{ "resource": "" }
q14150
ConfigurationModelManager.with_active_flag
train
def with_active_flag(self): """ A query set where each result is annotated with an 'is_active' field that indicates if it's the most recent entry for that combination of keys. """ if self.model.KEY_FIELDS: return self.get_queryset().annotate( is_active=models.ExpressionWrapper( models.Q(pk__in=self._current_ids_subquery()), output_field=models.IntegerField(),
python
{ "resource": "" }
q14151
ConfigurationModel.save
train
def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ Clear the cached value when saving a new configuration entry """ # Always create a new entry, instead of updating an existing model self.pk = None # pylint: disable=invalid-name super(ConfigurationModel, self).save( force_insert, force_update, using,
python
{ "resource": "" }
q14152
ConfigurationModel.cache_key_name
train
def cache_key_name(cls, *args): """Return the name of the key to use to cache the current configuration""" if cls.KEY_FIELDS != (): if len(args) != len(cls.KEY_FIELDS): raise TypeError( "cache_key_name() takes exactly {} arguments ({} given)".format(len(cls.KEY_FIELDS), len(args)) )
python
{ "resource": "" }
q14153
ConfigurationModel.key_values_cache_key_name
train
def key_values_cache_key_name(cls, *key_fields): """ Key for fetching unique key values from the cache """ key_fields = key_fields or cls.KEY_FIELDS
python
{ "resource": "" }
q14154
ConfigurationModel.fields_equal
train
def fields_equal(self, instance, fields_to_ignore=("id", "change_date", "changed_by")): """ Compares this instance's fields to the supplied instance to test for equality. This will ignore any fields in `fields_to_ignore`. Note that this method ignores many-to-many fields. Args: instance: the model instance to compare
python
{ "resource": "" }
q14155
ConfigurationModel.equal_to_current
train
def equal_to_current(cls, json, fields_to_ignore=("id", "change_date", "changed_by")): """ Compares this instance for equality to a model instance constructed from the supplied JSON. This will ignore any fields in `fields_to_ignore`. Note that this method cannot handle fields with many-to-many associations, as those can only be set on a saved model instance (and saving the model instance will create a new entry). All many-to-many field entries will be removed before the equality comparison is done. Args: json: json representing an entry to compare fields_to_ignore: List of fields that should not
python
{ "resource": "" }
q14156
AtomicMixin.create_atomic_wrapper
train
def create_atomic_wrapper(cls, wrapped_func): """Returns a wrapped function.""" def _create_atomic_wrapper(*args, **kwargs): """Actual wrapper.""" # When a view call fails due to a permissions error, it raises an exception. # An uncaught exception breaks the DB transaction for any following DB operations
python
{ "resource": "" }
q14157
AtomicMixin.as_view
train
def as_view(cls, **initkwargs): """Overrides as_view to add atomic transaction.""" view
python
{ "resource": "" }
q14158
merge
train
def merge(file, feature_layers): ''' Retrieve a list of OSciMap4 tile responses and merge them into one. get_tiles() retrieves data and performs basic integrity checks. ''' tile = VectorTile(extents) for
python
{ "resource": "" }
q14159
_make_valid_if_necessary
train
def _make_valid_if_necessary(shape): """ attempt to correct invalid shapes if necessary After simplification, even when preserving topology, invalid shapes can be returned. This appears to only occur with polygon types. As an optimization, we only check if the polygon types are valid. """ if shape.type in ('Polygon', 'MultiPolygon') and not shape.is_valid:
python
{ "resource": "" }
q14160
_accumulate_props
train
def _accumulate_props(dest_props, src_props): """ helper to accumulate a dict of properties Mutates dest_props by adding the non None src_props and returns the new size """ props_size = 0 if src_props: for k, v
python
{ "resource": "" }
q14161
calculate_sizes_by_zoom
train
def calculate_sizes_by_zoom(coord, metatile_zoom, cfg_tile_sizes, max_zoom): """ Returns a map of nominal zoom to the list of tile sizes to generate at that zoom. This is because we want to generate different metatile contents at different zoom levels. At the most detailed zoom level, we want to generate the smallest tiles possible, as this allows "overzooming" by simply extracting the smaller tiles. At the minimum zoom, we want to get as close as we can to zero nominal zoom by using any "unused" space in the metatile for larger tile sizes that we're not generating. For example, with 1x1 metatiles, the tile size is always 256px, and the function will return {coord.zoom: [256]} Note that max_zoom should be the maximum *coordinate* zoom, not nominal zoom. """ from tilequeue.tile import metatile_zoom_from_size tile_size_by_zoom = {} nominal_zoom = coord.zoom + metatile_zoom # check that the tile sizes are correct and within range. for tile_size in cfg_tile_sizes: assert tile_size >= 256 assert tile_size <= 256 * (1 << metatile_zoom) assert _is_power_of_2(tile_size) if coord.zoom >= max_zoom: # all the tile_sizes down to 256 at the nominal zoom. tile_sizes = [] tile_sizes.extend(cfg_tile_sizes) lowest_tile_size = min(tile_sizes) while lowest_tile_size > 256:
python
{ "resource": "" }
q14162
calculate_cut_coords_by_zoom
train
def calculate_cut_coords_by_zoom( coord, metatile_zoom, cfg_tile_sizes, max_zoom): """ Returns a map of nominal zoom to the list of cut coordinates at that nominal zoom. Note that max_zoom should be the maximum coordinate zoom, not nominal zoom. """ tile_sizes_by_zoom = calculate_sizes_by_zoom( coord, metatile_zoom, cfg_tile_sizes, max_zoom) cut_coords_by_zoom = {} for nominal_zoom, tile_sizes in tile_sizes_by_zoom.iteritems():
python
{ "resource": "" }
q14163
tiles_are_equal
train
def tiles_are_equal(tile_data_1, tile_data_2, fmt): """ Returns True if the tile data is equal in tile_data_1 and tile_data_2. For most formats, this is a simple byte-wise equality check. For zipped
python
{ "resource": "" }
q14164
write_tile_if_changed
train
def write_tile_if_changed(store, tile_data, coord, format): """ Only write tile data if different from existing. Try to read the tile data from the store first. If the existing data matches, don't write. Returns whether the tile was written. """ existing_data = store.read_tile(coord, format) if not existing_data or \
python
{ "resource": "" }
q14165
_override_cfg
train
def _override_cfg(container, yamlkeys, value): """ Override a hierarchical key in the config, setting it to the value. Note that yamlkeys should be a non-empty list of strings. """ key = yamlkeys[0] rest = yamlkeys[1:] if len(rest) == 0:
python
{ "resource": "" }
q14166
coord_pyramid
train
def coord_pyramid(coord, zoom_start, zoom_stop): """ generate full pyramid for coord Generate the full pyramid for a single coordinate. Note that zoom_stop is exclusive. """ if zoom_start <= coord.zoom: yield coord for
python
{ "resource": "" }
q14167
coord_pyramids
train
def coord_pyramids(coords, zoom_start, zoom_stop): """ generate full pyramid for coords Generate the full pyramid for the list of coords. Note that zoom_stop is exclusive.
python
{ "resource": "" }
q14168
tilequeue_enqueue_full_pyramid_from_toi
train
def tilequeue_enqueue_full_pyramid_from_toi(cfg, peripherals, args): """enqueue a full pyramid from the z10 toi""" logger = make_logger(cfg, 'enqueue_tiles_of_interest') logger.info('Enqueueing tiles of interest') logger.info('Fetching tiles of interest ...') tiles_of_interest = peripherals.toi.fetch_tiles_of_interest() n_toi = len(tiles_of_interest) logger.info('Fetching tiles of interest ... done') rawr_yaml = cfg.yml.get('rawr') assert rawr_yaml, 'Missing rawr yaml' group_by_zoom = rawr_yaml.get('group-zoom') assert group_by_zoom, 'Missing rawr group-zoom' assert isinstance(group_by_zoom, int), 'Invalid rawr group-zoom' if args.zoom_start is None: zoom_start = group_by_zoom else: zoom_start = args.zoom_start if args.zoom_stop is None: zoom_stop = cfg.max_zoom + 1 # +1 because exclusive else: zoom_stop = args.zoom_stop assert zoom_start >= group_by_zoom assert zoom_stop > zoom_start ungrouped = [] coords_at_group_zoom = set() for coord_int in tiles_of_interest: coord = coord_unmarshall_int(coord_int) if coord.zoom < zoom_start: ungrouped.append(coord)
python
{ "resource": "" }
q14169
tilequeue_enqueue_random_pyramids
train
def tilequeue_enqueue_random_pyramids(cfg, peripherals, args): """enqueue random pyramids""" from tilequeue.stats import RawrTileEnqueueStatsHandler from tilequeue.rawr import make_rawr_enqueuer_from_cfg logger = make_logger(cfg, 'enqueue_random_pyramids') rawr_yaml = cfg.yml.get('rawr') assert rawr_yaml, 'Missing rawr yaml' group_by_zoom = rawr_yaml.get('group-zoom') assert group_by_zoom, 'Missing rawr group-zoom' assert isinstance(group_by_zoom, int), 'Invalid rawr group-zoom' if args.zoom_start is None: zoom_start = group_by_zoom else: zoom_start = args.zoom_start if args.zoom_stop is None: zoom_stop = cfg.max_zoom + 1 # +1 because exclusive else: zoom_stop = args.zoom_stop assert zoom_start >= group_by_zoom assert zoom_stop > zoom_start gridsize = args.gridsize total_samples = getattr(args, 'n-samples') samples_per_cell = total_samples / (gridsize * gridsize) tileset_dim = 2 ** group_by_zoom scale_factor = float(tileset_dim) / float(gridsize) stats = make_statsd_client_from_cfg(cfg) stats_handler = RawrTileEnqueueStatsHandler(stats) rawr_enqueuer = make_rawr_enqueuer_from_cfg( cfg, logger, stats_handler, peripherals.msg_marshaller) for grid_y in xrange(gridsize): tile_y_min = int(grid_y * scale_factor) tile_y_max = int((grid_y+1) * scale_factor) for grid_x in xrange(gridsize): tile_x_min = int(grid_x * scale_factor) tile_x_max = int((grid_x+1) * scale_factor)
python
{ "resource": "" }
q14170
emit_toi_stats
train
def emit_toi_stats(toi_set, peripherals): """ Calculates new TOI stats and emits them via statsd. """ count_by_zoom = defaultdict(int) total = 0 for coord_int in toi_set: coord = coord_unmarshall_int(coord_int)
python
{ "resource": "" }
q14171
tilequeue_stuck_tiles
train
def tilequeue_stuck_tiles(cfg, peripherals): """ Check which files exist on s3 but are not in toi. """ store = _make_store(cfg) format = lookup_format_by_extension('zip')
python
{ "resource": "" }
q14172
tilequeue_tile_status
train
def tilequeue_tile_status(cfg, peripherals, args): """ Report the status of the given tiles in the store, queue and TOI. """ logger = make_logger(cfg, 'tile_status') # friendly warning to avoid confusion when this command outputs nothing # at all when called with no positional arguments. if not args.coords: logger.warning('No coordinates given on the command line.') return # pre-load TOI to avoid having to do it for each coordinate toi = None if peripherals.toi: toi = peripherals.toi.fetch_tiles_of_interest() # TODO: make these configurable! tile_format = lookup_format_by_extension('zip') store = _make_store(cfg) for coord_str in args.coords: coord = deserialize_coord(coord_str) # input checking! make sure that the coordinate is okay to use in # the rest of the code. if not coord: logger.warning('Could not deserialize %r as coordinate', coord_str) continue if not coord_is_valid(coord): logger.warning('Coordinate is not valid: %r (parsed from %r)',
python
{ "resource": "" }
q14173
tilequeue_rawr_enqueue
train
def tilequeue_rawr_enqueue(cfg, args): """command to take tile expiry path and enqueue for rawr tile generation""" from tilequeue.stats import RawrTileEnqueueStatsHandler from tilequeue.rawr import make_rawr_enqueuer_from_cfg msg_marshall_yaml = cfg.yml.get('message-marshall') assert msg_marshall_yaml, 'Missing message-marshall
python
{ "resource": "" }
q14174
_tilequeue_rawr_setup
train
def _tilequeue_rawr_setup(cfg): """command to read from rawr queue and generate rawr tiles""" rawr_yaml = cfg.yml.get('rawr') assert rawr_yaml is not None, 'Missing rawr configuration in yaml' rawr_postgresql_yaml = rawr_yaml.get('postgresql') assert rawr_postgresql_yaml, 'Missing rawr postgresql config' from raw_tiles.formatter.msgpack import Msgpack from raw_tiles.gen import RawrGenerator from raw_tiles.source.conn import ConnectionContextManager from raw_tiles.source import parse_sources from raw_tiles.source import DEFAULT_SOURCES as DEFAULT_RAWR_SOURCES from tilequeue.rawr import RawrS3Sink from tilequeue.rawr import RawrStoreSink import boto3 # pass through the postgresql yaml config directly conn_ctx = ConnectionContextManager(rawr_postgresql_yaml) rawr_source_list = rawr_yaml.get('sources', DEFAULT_RAWR_SOURCES) assert isinstance(rawr_source_list, list), \ 'RAWR source list should be a list' assert len(rawr_source_list) > 0, \ 'RAWR source list should be non-empty' rawr_store = rawr_yaml.get('store') if rawr_store: store = make_store( rawr_store, credentials=cfg.subtree('aws credentials')) rawr_sink = RawrStoreSink(store) else: rawr_sink_yaml = rawr_yaml.get('sink') assert rawr_sink_yaml, 'Missing rawr sink config' sink_type = rawr_sink_yaml.get('type') assert sink_type, 'Missing rawr sink type' if sink_type == 's3': s3_cfg = rawr_sink_yaml.get('s3') assert s3_cfg, 'Missing s3 config'
python
{ "resource": "" }
q14175
tilequeue_rawr_seed_toi
train
def tilequeue_rawr_seed_toi(cfg, peripherals): """command to read the toi and enqueue the corresponding rawr tiles""" tiles_of_interest = peripherals.toi.fetch_tiles_of_interest()
python
{ "resource": "" }
q14176
tilequeue_rawr_seed_all
train
def tilequeue_rawr_seed_all(cfg, peripherals): """command to enqueue all the tiles at the group-by zoom""" rawr_yaml = cfg.yml.get('rawr') assert rawr_yaml is not None, 'Missing rawr configuration in yaml' group_by_zoom = rawr_yaml.get('group-zoom') assert group_by_zoom is not None, 'Missing group-zoom rawr config'
python
{ "resource": "" }
q14177
update_arc_indexes
train
def update_arc_indexes(geometry, merged_arcs, old_arcs): """ Update geometry arc indexes, and add arcs to merged_arcs along the way. Arguments are modified in-place, and nothing is returned. """ if geometry['type'] in ('Point', 'MultiPoint'): return elif geometry['type'] == 'LineString': for arc_index, old_arc in enumerate(geometry['arcs']): geometry['arcs'][arc_index] = len(merged_arcs) merged_arcs.append(old_arcs[old_arc]) elif geometry['type'] == 'Polygon': for ring in geometry['arcs']:
python
{ "resource": "" }
q14178
get_transform
train
def get_transform(bounds, size=4096): """ Return a TopoJSON transform dictionary and a point-transforming function. Size is the tile size in pixels and sets the implicit output resolution. """ tx, ty = bounds[0], bounds[1] sx, sy = (bounds[2] - bounds[0]) / size, (bounds[3] - bounds[1]) / size def forward(lon, lat): """
python
{ "resource": "" }
q14179
diff_encode
train
def diff_encode(line, transform): """ Differentially encode a shapely linestring or ring. """ coords = [transform(x, y) for (x, y) in line.coords] pairs = zip(coords[:], coords[1:]) diffs
python
{ "resource": "" }
q14180
jinja_filter_bbox_overlaps
train
def jinja_filter_bbox_overlaps(bounds, geometry_col_name, srid=3857): """ Check whether the boundary of the geometry intersects with the bounding box. Note that the usual meaning of "overlaps" in GIS terminology is that the boundaries of the box and polygon intersect, but not the interiors. This means that if the box or polygon is completely within the other, then st_overlaps will be false. However, that's not what we want. This is used for boundary testing, and while we don't want to pull out a whole country boundary if the bounding box is fully within it, we _do_ want to if the country boundary is within the bounding box. Therefore, this test has an extra "or st_contains" test to also pull in any boundaries which are completely within the bounding box. """
python
{ "resource": "" }
q14181
make_db_data_fetcher
train
def make_db_data_fetcher(postgresql_conn_info, template_path, reload_templates, query_cfg, io_pool): """ Returns an object which is callable with the zoom and unpadded bounds
python
{ "resource": "" }
q14182
common_parent
train
def common_parent(a, b): """ Find the common parent tile of both a and b. The common parent is the tile at the highest zoom which both a and b can be transformed into by lowering their zoom levels. """ if a.zoom < b.zoom: b = b.zoomTo(a.zoom).container() elif a.zoom > b.zoom:
python
{ "resource": "" }
q14183
_parent_tile
train
def _parent_tile(tiles): """ Find the common parent tile for a sequence of tiles. """ parent = None for t in tiles: if parent is None: parent =
python
{ "resource": "" }
q14184
make_metatiles
train
def make_metatiles(size, tiles, date_time=None): """ Group by layers, and make metatiles out of all the tiles which share those properties relative to the "top level" tile which is parent of them all. Provide a 6-tuple date_time to set the timestamp on each tile within the
python
{ "resource": "" }
q14185
_metatile_contents_equal
train
def _metatile_contents_equal(zip_1, zip_2): """ Given two open zip files as arguments, this returns True if the zips both contain the same set of files, having the same names, and each file within the zip is byte-wise identical to the one with the same name in the other zip. """ names_1 = set(zip_1.namelist()) names_2 = set(zip_2.namelist())
python
{ "resource": "" }
q14186
metatiles_are_equal
train
def metatiles_are_equal(tile_data_1, tile_data_2): """ Return True if the two tiles are both zipped metatiles and contain the same set of files with the same contents. This ignores the timestamp of the individual files in the zip files, as well as their order or any other metadata. """ try: buf_1 = StringIO.StringIO(tile_data_1)
python
{ "resource": "" }
q14187
make_coord_dict
train
def make_coord_dict(coord): """helper function to make a dict from a coordinate for logging""" return dict( z=int_if_exact(coord.zoom),
python
{ "resource": "" }
q14188
convert_feature_layers_to_dict
train
def convert_feature_layers_to_dict(feature_layers): """takes a list of 'feature_layer' objects and converts to a dict keyed by the layer name"""
python
{ "resource": "" }
q14189
ZoomRangeAndZoomGroupQueueMapper.group
train
def group(self, coords): """return CoordGroups that can be used to send to queues Each CoordGroup represents a message that can be sent to a particular queue, stamped with the queue_id. The list of coords, which can be 1, is what should get used for the payload for each queue message. """ groups = [] for i in range(len(self.zoom_range_items)): groups.append([]) # first group the coordinates based on their queue for coord in coords: for i, zri in enumerate(self.zoom_range_items): toi_match = zri.in_toi is None or \ (coord in self.toi_set) == zri.in_toi if zri.start <= coord.zoom < zri.end and toi_match: groups[i].append(coord) break # now, we need to just verify that for each particular group,
python
{ "resource": "" }
q14190
common_parent
train
def common_parent(coords, parent_zoom): """ Return the common parent for coords Also check that all coords do indeed share the same parent coordinate. """ parent = None for coord in coords: assert parent_zoom <= coord.zoom coord_parent = coord.zoomTo(parent_zoom).container() if parent is
python
{ "resource": "" }
q14191
convert_coord_object
train
def convert_coord_object(coord): """Convert ModestMaps.Core.Coordinate -> raw_tiles.tile.Tile""" assert isinstance(coord, Coordinate)
python
{ "resource": "" }
q14192
unconvert_coord_object
train
def unconvert_coord_object(tile): """Convert rawr_tiles.tile.Tile -> ModestMaps.Core.Coordinate""" assert isinstance(tile, Tile)
python
{ "resource": "" }
q14193
make_rawr_zip_payload
train
def make_rawr_zip_payload(rawr_tile, date_time=None): """make a zip file from the rawr tile formatted data""" if date_time is None: date_time = gmtime()[0:6] buf = StringIO() with zipfile.ZipFile(buf, mode='w') as z: for fmt_data in rawr_tile.all_formatted_data: zip_info
python
{ "resource": "" }
q14194
unpack_rawr_zip_payload
train
def unpack_rawr_zip_payload(table_sources, payload): """unpack a zipfile and turn it into a callable "tables" object.""" # the io we get from S3 is streaming, so we can't seek on it, but zipfile # seems to require that. so we buffer it all in memory. RAWR tiles are # generally up to around 100MB in size, which should be safe to store in
python
{ "resource": "" }
q14195
SqsQueue.send
train
def send(self, payloads, logger, num_tries=5): """ Enqueue payloads to the SQS queue, retrying failed messages with exponential backoff. """ from time import sleep backoff_interval = 1 backoff_factor = 2 for try_counter in xrange(0, num_tries): failed_messages = self.send_without_retry(payloads) # success! if not failed_messages: payloads = [] break # output some information about the failures for debugging # purposes. we expect failures to be quite rare, so we can be # pretty verbose. if logger: for msg in failed_messages: logger.warning("Failed to send message on try %d: Id=%r, " "SenderFault=%r, Code=%r, Message=%r" % (try_counter, msg['Id'],
python
{ "resource": "" }
q14196
SqsQueue.read
train
def read(self): """read a single message from the queue""" resp = self.sqs_client.receive_message( QueueUrl=self.queue_url, MaxNumberOfMessages=1, AttributeNames=('SentTimestamp',), WaitTimeSeconds=self.recv_wait_time_seconds, ) if resp['ResponseMetadata']['HTTPStatusCode'] != 200: raise Exception('Invalid status code from sqs: %s' % resp['ResponseMetadata']['HTTPStatusCode']) msgs = resp.get('Messages') if not msgs:
python
{ "resource": "" }
q14197
SqsQueue.done
train
def done(self, msg_handle): """acknowledge completion of message""" self.sqs_client.delete_message(
python
{ "resource": "" }
q14198
RawrToiIntersector.tiles_of_interest
train
def tiles_of_interest(self): """conditionally get the toi from s3""" # also return back whether the response was cached # useful for metrics is_cached = False get_options = dict( Bucket=self.bucket, Key=self.key, ) if self.etag: get_options['IfNoneMatch'] = self.etag try: resp = self.s3_client.get_object(**get_options) except Exception as e: # boto3 client treats 304 responses as exceptions if isinstance(e, ClientError): resp = getattr(e, 'response', None) assert resp else: raise e status_code = resp['ResponseMetadata']['HTTPStatusCode'] if status_code == 304: assert self.prev_toi toi = self.prev_toi is_cached = True elif status_code == 200:
python
{ "resource": "" }
q14199
_ack_coord_handle
train
def _ack_coord_handle( coord, coord_handle, queue_mapper, msg_tracker, timing_state, tile_proc_logger, stats_handler): """share code for acknowledging a coordinate""" # returns tuple of (handle, error), either of which can be None track_result = msg_tracker.done(coord_handle) queue_handle = track_result.queue_handle if not queue_handle: return None, None tile_queue = queue_mapper.get_queue(queue_handle.queue_id) assert tile_queue, \ 'Missing tile_queue: %s' % queue_handle.queue_id parent_tile = None if track_result.all_done: parent_tile = track_result.parent_tile try: tile_queue.job_done(queue_handle.handle) except Exception as e: stacktrace = format_stacktrace_one_line() tile_proc_logger.error_job_done( 'tile_queue.job_done', e, stacktrace, coord, parent_tile, ) return queue_handle, e if parent_tile is not None: # we completed a tile pyramid and should log appropriately start_time = timing_state['start'] stop_time = convert_seconds_to_millis(time.time())
python
{ "resource": "" }