_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q271300
Widget.prepare_data
test
def prepare_data(self): """Prepare widget data for template.""" result = {} for field in self.fields: data = self.data.get(field.name) result[field.name] = field.prepare_data(data) return result
python
{ "resource": "" }
q271301
Widget.render
test
def render(self, data=None, add_context=None): """Renders the widget as HTML.""" template = loader.get_template(self.template) if not data: data = self.context(self.prepare_data()) if add_context is not None: for key, value in add_context.iteritems(): if key in self.accepted_keywords: data[key] = value return template.render(data)
python
{ "resource": "" }
q271302
BaseIntegration.get_settings
test
def get_settings(cls, show_hidden=False): """ Retrieves the settings for this integration as a dictionary. Removes all hidden fields if show_hidden=False """ settings = Integration.objects.get_settings(cls.ID) if not show_hidden: for field in cls.HIDDEN_FIELDS: settings.pop(field, None) return settings
python
{ "resource": "" }
q271303
FacebookInstantArticlesIntegration.callback
test
def callback(cls, user, query): """Receive OAuth callback request from Facebook.""" # Get settings for this integration settings = cls.get_settings(show_hidden=True) fb = Facebook() payload = { 'client_id': settings['client_id'], 'client_secret': settings['client_secret'], 'code': query['code'], 'redirect_uri': cls.REDIRECT_URI } try: # Authenticate with Facebook fb.get_access_token(payload) # Fetch pages belonging to authenticated user pages = fb.list_pages('me') except FacebookAPIError, e: raise IntegrationCallbackError(e.message) return { 'pages': pages }
python
{ "resource": "" }
q271304
IntegrationManager.get_settings
test
def get_settings(self, integration_id): """Return settings for given integration as a dictionary.""" try: integration = self.get(integration_id=integration_id) return json.loads(integration.settings) except (self.model.DoesNotExist, ValueError): return {}
python
{ "resource": "" }
q271305
IntegrationManager.update_settings
test
def update_settings(self, integration_id, settings): """Updates settings for given integration.""" (integration, created) = self.get_or_create(integration_id=integration_id) try: current_settings = json.loads(integration.settings) except ValueError: current_settings = {} current_settings.update(settings) integration.settings = json.dumps(current_settings) integration.save()
python
{ "resource": "" }
q271306
signup
test
def signup(request, uuid=None): """Handles requests to the user signup page.""" invite = get_object_or_404(Invite.objects.all(), id=uuid) if invite.expiration_date < timezone.now(): invite.delete() raise Http404('This page does not exist.') if request.method == 'POST': form = SignUpForm(request.POST) if form.is_valid(): user = form.save(commit=False) user.email = invite.email user.person = invite.person user.save() if invite.permissions == 'admin': group = Group.objects.get(name='Admin') user.groups.add(group) invite.delete() return redirect('dispatch-admin') else: return render( request, 'registration/signup.html', { 'form': form, 'email': invite.email } ) else: form = SignUpForm() return render( request, 'registration/signup.html', { 'form': form, 'email': invite.email } )
python
{ "resource": "" }
q271307
maptag
test
def maptag(tagname, contents): """Returns the HTML produced from enclosing each item in `contents` in a tag of type `tagname`""" return u''.join(tag(tagname, item) for item in contents)
python
{ "resource": "" }
q271308
zone
test
def zone(zone_id, **kwargs): """Renders the contents of the zone with given zone_id.""" try: zone = ThemeManager.Zones.get(zone_id) except ZoneNotFound: return '' try: return zone.widget.render(add_context=kwargs) except (WidgetNotFound, AttributeError): pass return ''
python
{ "resource": "" }
q271309
Publishable.save_featured_image
test
def save_featured_image(self, data): """ Handles saving the featured image. If data is None, the featured image will be removed. `data` should be dictionary with the following format: { 'image_id': int, 'caption': str, 'credit': str } """ attachment = self.featured_image if data is None: if attachment: attachment.delete() self.featured_image = None return if data['image_id'] is None: if attachment: attachment.delete() self.featured_image = None return if not attachment: attachment = ImageAttachment() attachment.image_id = data.get('image_id', attachment.image_id) attachment.caption = data.get('caption', None) attachment.credit = data.get('credit', None) instance_type = str(type(self)).lower() setattr(attachment, instance_type, self) attachment.save() self.featured_image = attachment
python
{ "resource": "" }
q271310
Article.save_subsection
test
def save_subsection(self, subsection_id): """ Save the subsection to the parent article """ Article.objects.filter(parent_id=self.parent.id).update(subsection_id=subsection_id)
python
{ "resource": "" }
q271311
Image.get_extension
test
def get_extension(self): """Returns the file extension.""" ext = os.path.splitext(self.img.name)[1] if ext: # Remove period from extension return ext[1:] return ext
python
{ "resource": "" }
q271312
Image.get_medium_url
test
def get_medium_url(self): """Returns the medium size image URL.""" if self.is_gif(): return self.get_absolute_url() return '%s%s-%s.jpg' % (settings.MEDIA_URL, self.get_name(), 'medium')
python
{ "resource": "" }
q271313
Image.save
test
def save(self, **kwargs): """Custom save method to process thumbnails and save image dimensions.""" is_new = self.pk is None if is_new: # Make filenames lowercase self.img.name = self.img.name.lower() # Call super method super(Image, self).save(**kwargs) if is_new and self.img: data = self.img.read() if not data: return image = Img.open(StringIO.StringIO(data)) self.width, self.height = image.size super(Image, self).save() name = self.get_name() ext = self.get_extension() for size in self.SIZES.keys(): self.save_thumbnail(image, self.SIZES[size], name, size, ext)
python
{ "resource": "" }
q271314
Image.save_thumbnail
test
def save_thumbnail(self, image, size, name, label, file_type): """Processes and saves a resized thumbnail version of the image.""" width, height = size (imw, imh) = image.size # If image is larger than thumbnail size, resize image if (imw > width) or (imh > height): image.thumbnail(size, Img.ANTIALIAS) # Attach new thumbnail label to image filename name = "%s-%s.jpg" % (name, label) # Image.save format takes JPEG not jpg if file_type in self.JPG_FORMATS: file_type = 'JPEG' # Write new thumbnail to StringIO object image_io = StringIO.StringIO() image.save(image_io, format=file_type, quality=75) # Convert StringIO object to Django File object thumb_file = InMemoryUploadedFile(image_io, None, name, 'image/jpeg', image_io.len, None) # Save the new file to the default storage system default_storage.save(name, thumb_file)
python
{ "resource": "" }
q271315
MySQL.connection
test
def connection(self): """Attempts to connect to the MySQL server. :return: Bound MySQL connection object if successful or ``None`` if unsuccessful. """ ctx = _app_ctx_stack.top if ctx is not None: if not hasattr(ctx, 'mysql_db'): ctx.mysql_db = self.connect return ctx.mysql_db
python
{ "resource": "" }
q271316
BandwidthLimiter.get_bandwith_limited_stream
test
def get_bandwith_limited_stream(self, fileobj, transfer_coordinator, enabled=True): """Wraps a fileobj in a bandwidth limited stream wrapper :type fileobj: file-like obj :param fileobj: The file-like obj to wrap :type transfer_coordinator: s3transfer.futures.TransferCoordinator param transfer_coordinator: The coordinator for the general transfer that the wrapped stream is a part of :type enabled: boolean :param enabled: Whether bandwidth limiting should be enabled to start """ stream = BandwidthLimitedStream( fileobj, self._leaky_bucket, transfer_coordinator, self._time_utils) if not enabled: stream.disable_bandwidth_limiting() return stream
python
{ "resource": "" }
q271317
BandwidthLimitedStream.read
test
def read(self, amount): """Read a specified amount Reads will only be throttled if bandwidth limiting is enabled. """ if not self._bandwidth_limiting_enabled: return self._fileobj.read(amount) # We do not want to be calling consume on every read as the read # amounts can be small causing the lock of the leaky bucket to # introduce noticeable overhead. So instead we keep track of # how many bytes we have seen and only call consume once we pass a # certain threshold. self._bytes_seen += amount if self._bytes_seen < self._bytes_threshold: return self._fileobj.read(amount) self._consume_through_leaky_bucket() return self._fileobj.read(amount)
python
{ "resource": "" }
q271318
LeakyBucket.consume
test
def consume(self, amt, request_token): """Consume an a requested amount :type amt: int :param amt: The amount of bytes to request to consume :type request_token: RequestToken :param request_token: The token associated to the consumption request that is used to identify the request. So if a RequestExceededException is raised the token should be used in subsequent retry consume() request. :raises RequestExceededException: If the consumption amount would exceed the maximum allocated bandwidth :rtype: int :returns: The amount consumed """ with self._lock: time_now = self._time_utils.time() if self._consumption_scheduler.is_scheduled(request_token): return self._release_requested_amt_for_scheduled_request( amt, request_token, time_now) elif self._projected_to_exceed_max_rate(amt, time_now): self._raise_request_exceeded_exception( amt, request_token, time_now) else: return self._release_requested_amt(amt, time_now)
python
{ "resource": "" }
q271319
ConsumptionScheduler.schedule_consumption
test
def schedule_consumption(self, amt, token, time_to_consume): """Schedules a wait time to be able to consume an amount :type amt: int :param amt: The amount of bytes scheduled to be consumed :type token: RequestToken :param token: The token associated to the consumption request that is used to identify the request. :type time_to_consume: float :param time_to_consume: The desired time it should take for that specific request amount to be consumed in regardless of previously scheduled consumption requests :rtype: float :returns: The amount of time to wait for the specific request before actually consuming the specified amount. """ self._total_wait += time_to_consume self._tokens_to_scheduled_consumption[token] = { 'wait_duration': self._total_wait, 'time_to_consume': time_to_consume, } return self._total_wait
python
{ "resource": "" }
q271320
ConsumptionScheduler.process_scheduled_consumption
test
def process_scheduled_consumption(self, token): """Processes a scheduled consumption request that has completed :type token: RequestToken :param token: The token associated to the consumption request that is used to identify the request. """ scheduled_retry = self._tokens_to_scheduled_consumption.pop(token) self._total_wait = max( self._total_wait - scheduled_retry['time_to_consume'], 0)
python
{ "resource": "" }
q271321
BandwidthRateTracker.get_projected_rate
test
def get_projected_rate(self, amt, time_at_consumption): """Get the projected rate using a provided amount and time :type amt: int :param amt: The proposed amount to consume :type time_at_consumption: float :param time_at_consumption: The proposed time to consume at :rtype: float :returns: The consumption rate if that amt and time were consumed """ if self._last_time is None: return 0.0 return self._calculate_exponential_moving_average_rate( amt, time_at_consumption)
python
{ "resource": "" }
q271322
BandwidthRateTracker.record_consumption_rate
test
def record_consumption_rate(self, amt, time_at_consumption): """Record the consumption rate based off amount and time point :type amt: int :param amt: The amount that got consumed :type time_at_consumption: float :param time_at_consumption: The time at which the amount was consumed """ if self._last_time is None: self._last_time = time_at_consumption self._current_rate = 0.0 return self._current_rate = self._calculate_exponential_moving_average_rate( amt, time_at_consumption) self._last_time = time_at_consumption
python
{ "resource": "" }
q271323
ProcessPoolDownloader.download_file
test
def download_file(self, bucket, key, filename, extra_args=None, expected_size=None): """Downloads the object's contents to a file :type bucket: str :param bucket: The name of the bucket to download from :type key: str :param key: The name of the key to download from :type filename: str :param filename: The name of a file to download to. :type extra_args: dict :param extra_args: Extra arguments that may be passed to the client operation :type expected_size: int :param expected_size: The expected size in bytes of the download. If provided, the downloader will not call HeadObject to determine the object's size and use the provided value instead. The size is needed to determine whether to do a multipart download. :rtype: s3transfer.futures.TransferFuture :returns: Transfer future representing the download """ self._start_if_needed() if extra_args is None: extra_args = {} self._validate_all_known_args(extra_args) transfer_id = self._transfer_monitor.notify_new_transfer() download_file_request = DownloadFileRequest( transfer_id=transfer_id, bucket=bucket, key=key, filename=filename, extra_args=extra_args, expected_size=expected_size, ) logger.debug( 'Submitting download file request: %s.', download_file_request) self._download_request_queue.put(download_file_request) call_args = CallArgs( bucket=bucket, key=key, filename=filename, extra_args=extra_args, expected_size=expected_size) future = self._get_transfer_future(transfer_id, call_args) return future
python
{ "resource": "" }
q271324
TransferMonitor.poll_for_result
test
def poll_for_result(self, transfer_id): """Poll for the result of a transfer :param transfer_id: Unique identifier for the transfer :return: If the transfer succeeded, it will return the result. If the transfer failed, it will raise the exception associated to the failure. """ self._transfer_states[transfer_id].wait_till_done() exception = self._transfer_states[transfer_id].exception if exception: raise exception return None
python
{ "resource": "" }
q271325
get_callbacks
test
def get_callbacks(transfer_future, callback_type): """Retrieves callbacks from a subscriber :type transfer_future: s3transfer.futures.TransferFuture :param transfer_future: The transfer future the subscriber is associated to. :type callback_type: str :param callback_type: The type of callback to retrieve from the subscriber. Valid types include: * 'queued' * 'progress' * 'done' :returns: A list of callbacks for the type specified. All callbacks are preinjected with the transfer future. """ callbacks = [] for subscriber in transfer_future.meta.call_args.subscribers: callback_name = 'on_' + callback_type if hasattr(subscriber, callback_name): callbacks.append( functools.partial( getattr(subscriber, callback_name), future=transfer_future ) ) return callbacks
python
{ "resource": "" }
q271326
get_filtered_dict
test
def get_filtered_dict(original_dict, whitelisted_keys): """Gets a dictionary filtered by whitelisted keys :param original_dict: The original dictionary of arguments to source keys and values. :param whitelisted_key: A list of keys to include in the filtered dictionary. :returns: A dictionary containing key/values from the original dictionary whose key was included in the whitelist """ filtered_dict = {} for key, value in original_dict.items(): if key in whitelisted_keys: filtered_dict[key] = value return filtered_dict
python
{ "resource": "" }
q271327
CountCallbackInvoker.decrement
test
def decrement(self): """Decrement the count by one""" with self._lock: if self._count == 0: raise RuntimeError( 'Counter is at zero. It cannot dip below zero') self._count -= 1 if self._is_finalized and self._count == 0: self._callback()
python
{ "resource": "" }
q271328
CountCallbackInvoker.finalize
test
def finalize(self): """Finalize the counter Once finalized, the counter never be incremented and the callback can be invoked once the count reaches zero """ with self._lock: self._is_finalized = True if self._count == 0: self._callback()
python
{ "resource": "" }
q271329
OSUtils.is_special_file
test
def is_special_file(cls, filename): """Checks to see if a file is a special UNIX file. It checks if the file is a character special device, block special device, FIFO, or socket. :param filename: Name of the file :returns: True if the file is a special file. False, if is not. """ # If it does not exist, it must be a new file so it cannot be # a special file. if not os.path.exists(filename): return False mode = os.stat(filename).st_mode # Character special device. if stat.S_ISCHR(mode): return True # Block special device if stat.S_ISBLK(mode): return True # Named pipe / FIFO if stat.S_ISFIFO(mode): return True # Socket. if stat.S_ISSOCK(mode): return True return False
python
{ "resource": "" }
q271330
TaskSemaphore.acquire
test
def acquire(self, tag, blocking=True): """Acquire the semaphore :param tag: A tag identifying what is acquiring the semaphore. Note that this is not really needed to directly use this class but is needed for API compatibility with the SlidingWindowSemaphore implementation. :param block: If True, block until it can be acquired. If False, do not block and raise an exception if cannot be aquired. :returns: A token (can be None) to use when releasing the semaphore """ logger.debug("Acquiring %s", tag) if not self._semaphore.acquire(blocking): raise NoResourcesAvailable("Cannot acquire tag '%s'" % tag)
python
{ "resource": "" }
q271331
TaskSemaphore.release
test
def release(self, tag, acquire_token): """Release the semaphore :param tag: A tag identifying what is releasing the semaphore :param acquire_token: The token returned from when the semaphore was acquired. Note that this is not really needed to directly use this class but is needed for API compatibility with the SlidingWindowSemaphore implementation. """ logger.debug("Releasing acquire %s/%s" % (tag, acquire_token)) self._semaphore.release()
python
{ "resource": "" }
q271332
ChunksizeAdjuster.adjust_chunksize
test
def adjust_chunksize(self, current_chunksize, file_size=None): """Get a chunksize close to current that fits within all S3 limits. :type current_chunksize: int :param current_chunksize: The currently configured chunksize. :type file_size: int or None :param file_size: The size of the file to upload. This might be None if the object being transferred has an unknown size. :returns: A valid chunksize that fits within configured limits. """ chunksize = current_chunksize if file_size is not None: chunksize = self._adjust_for_max_parts(chunksize, file_size) return self._adjust_for_chunksize_limits(chunksize)
python
{ "resource": "" }
q271333
DownloadOutputManager.queue_file_io_task
test
def queue_file_io_task(self, fileobj, data, offset): """Queue IO write for submission to the IO executor. This method accepts an IO executor and information about the downloaded data, and handles submitting this to the IO executor. This method may defer submission to the IO executor if necessary. """ self._transfer_coordinator.submit( self._io_executor, self.get_io_write_task(fileobj, data, offset) )
python
{ "resource": "" }
q271334
DownloadOutputManager.get_io_write_task
test
def get_io_write_task(self, fileobj, data, offset): """Get an IO write task for the requested set of data This task can be ran immediately or be submitted to the IO executor for it to run. :type fileobj: file-like object :param fileobj: The file-like object to write to :type data: bytes :param data: The data to write out :type offset: integer :param offset: The offset to write the data to in the file-like object :returns: An IO task to be used to write data to a file-like object """ return IOWriteTask( self._transfer_coordinator, main_kwargs={ 'fileobj': fileobj, 'data': data, 'offset': offset, } )
python
{ "resource": "" }
q271335
DownloadSubmissionTask._get_download_output_manager_cls
test
def _get_download_output_manager_cls(self, transfer_future, osutil): """Retrieves a class for managing output for a download :type transfer_future: s3transfer.futures.TransferFuture :param transfer_future: The transfer future for the request :type osutil: s3transfer.utils.OSUtils :param osutil: The os utility associated to the transfer :rtype: class of DownloadOutputManager :returns: The appropriate class to use for managing a specific type of input for downloads. """ download_manager_resolver_chain = [ DownloadSpecialFilenameOutputManager, DownloadFilenameOutputManager, DownloadSeekableOutputManager, DownloadNonSeekableOutputManager, ] fileobj = transfer_future.meta.call_args.fileobj for download_manager_cls in download_manager_resolver_chain: if download_manager_cls.is_compatible(fileobj, osutil): return download_manager_cls raise RuntimeError( 'Output %s of type: %s is not supported.' % ( fileobj, type(fileobj)))
python
{ "resource": "" }
q271336
GetObjectTask._main
test
def _main(self, client, bucket, key, fileobj, extra_args, callbacks, max_attempts, download_output_manager, io_chunksize, start_index=0, bandwidth_limiter=None): """Downloads an object and places content into io queue :param client: The client to use when calling GetObject :param bucket: The bucket to download from :param key: The key to download from :param fileobj: The file handle to write content to :param exta_args: Any extra arguements to include in GetObject request :param callbacks: List of progress callbacks to invoke on download :param max_attempts: The number of retries to do when downloading :param download_output_manager: The download output manager associated with the current download. :param io_chunksize: The size of each io chunk to read from the download stream and queue in the io queue. :param start_index: The location in the file to start writing the content of the key to. :param bandwidth_limiter: The bandwidth limiter to use when throttling the downloading of data in streams. """ last_exception = None for i in range(max_attempts): try: response = client.get_object( Bucket=bucket, Key=key, **extra_args) streaming_body = StreamReaderProgress( response['Body'], callbacks) if bandwidth_limiter: streaming_body = \ bandwidth_limiter.get_bandwith_limited_stream( streaming_body, self._transfer_coordinator) current_index = start_index chunks = DownloadChunkIterator(streaming_body, io_chunksize) for chunk in chunks: # If the transfer is done because of a cancellation # or error somewhere else, stop trying to submit more # data to be written and break out of the download. 
if not self._transfer_coordinator.done(): self._handle_io( download_output_manager, fileobj, chunk, current_index ) current_index += len(chunk) else: return return except S3_RETRYABLE_DOWNLOAD_ERRORS as e: logger.debug("Retrying exception caught (%s), " "retrying request, (attempt %s / %s)", e, i, max_attempts, exc_info=True) last_exception = e # Also invoke the progress callbacks to indicate that we # are trying to download the stream again and all progress # for this GetObject has been lost. invoke_progress_callbacks( callbacks, start_index - current_index) continue raise RetriesExceededError(last_exception)
python
{ "resource": "" }
q271337
IOWriteTask._main
test
def _main(self, fileobj, data, offset): """Pulls off an io queue to write contents to a file :param fileobj: The file handle to write content to :param data: The data to write :param offset: The offset to write the data to. """ fileobj.seek(offset) fileobj.write(data)
python
{ "resource": "" }
q271338
DeferQueue.request_writes
test
def request_writes(self, offset, data): """Request any available writes given new incoming data. You call this method by providing new data along with the offset associated with the data. If that new data unlocks any contiguous writes that can now be submitted, this method will return all applicable writes. This is done with 1 method call so you don't have to make two method calls (put(), get()) which acquires a lock each method call. """ if offset < self._next_offset: # This is a request for a write that we've already # seen. This can happen in the event of a retry # where if we retry at at offset N/2, we'll requeue # offsets 0-N/2 again. return [] writes = [] if offset in self._pending_offsets: # We've already queued this offset so this request is # a duplicate. In this case we should ignore # this request and prefer what's already queued. return [] heapq.heappush(self._writes, (offset, data)) self._pending_offsets.add(offset) while self._writes and self._writes[0][0] == self._next_offset: next_write = heapq.heappop(self._writes) writes.append({'offset': next_write[0], 'data': next_write[1]}) self._pending_offsets.remove(next_write[0]) self._next_offset += len(next_write[1]) return writes
python
{ "resource": "" }
q271339
seekable
test
def seekable(fileobj): """Backwards compat function to determine if a fileobj is seekable :param fileobj: The file-like object to determine if seekable :returns: True, if seekable. False, otherwise. """ # If the fileobj has a seekable attr, try calling the seekable() # method on it. if hasattr(fileobj, 'seekable'): return fileobj.seekable() # If there is no seekable attr, check if the object can be seeked # or telled. If it can, try to seek to the current position. elif hasattr(fileobj, 'seek') and hasattr(fileobj, 'tell'): try: fileobj.seek(0, 1) return True except (OSError, IOError): # If an io related error was thrown then it is not seekable. return False # Else, the fileobj is not seekable return False
python
{ "resource": "" }
q271340
TransferManager.upload
test
def upload(self, fileobj, bucket, key, extra_args=None, subscribers=None): """Uploads a file to S3 :type fileobj: str or seekable file-like object :param fileobj: The name of a file to upload or a seekable file-like object to upload. It is recommended to use a filename because file-like objects may result in higher memory usage. :type bucket: str :param bucket: The name of the bucket to upload to :type key: str :param key: The name of the key to upload to :type extra_args: dict :param extra_args: Extra arguments that may be passed to the client operation :type subscribers: list(s3transfer.subscribers.BaseSubscriber) :param subscribers: The list of subscribers to be invoked in the order provided based on the event emit during the process of the transfer request. :rtype: s3transfer.futures.TransferFuture :returns: Transfer future representing the upload """ if extra_args is None: extra_args = {} if subscribers is None: subscribers = [] self._validate_all_known_args(extra_args, self.ALLOWED_UPLOAD_ARGS) call_args = CallArgs( fileobj=fileobj, bucket=bucket, key=key, extra_args=extra_args, subscribers=subscribers ) extra_main_kwargs = {} if self._bandwidth_limiter: extra_main_kwargs['bandwidth_limiter'] = self._bandwidth_limiter return self._submit_transfer( call_args, UploadSubmissionTask, extra_main_kwargs)
python
{ "resource": "" }
q271341
TransferManager.download
test
def download(self, bucket, key, fileobj, extra_args=None, subscribers=None): """Downloads a file from S3 :type bucket: str :param bucket: The name of the bucket to download from :type key: str :param key: The name of the key to download from :type fileobj: str or seekable file-like object :param fileobj: The name of a file to download or a seekable file-like object to download. It is recommended to use a filename because file-like objects may result in higher memory usage. :type extra_args: dict :param extra_args: Extra arguments that may be passed to the client operation :type subscribers: list(s3transfer.subscribers.BaseSubscriber) :param subscribers: The list of subscribers to be invoked in the order provided based on the event emit during the process of the transfer request. :rtype: s3transfer.futures.TransferFuture :returns: Transfer future representing the download """ if extra_args is None: extra_args = {} if subscribers is None: subscribers = [] self._validate_all_known_args(extra_args, self.ALLOWED_DOWNLOAD_ARGS) call_args = CallArgs( bucket=bucket, key=key, fileobj=fileobj, extra_args=extra_args, subscribers=subscribers ) extra_main_kwargs = {'io_executor': self._io_executor} if self._bandwidth_limiter: extra_main_kwargs['bandwidth_limiter'] = self._bandwidth_limiter return self._submit_transfer( call_args, DownloadSubmissionTask, extra_main_kwargs)
python
{ "resource": "" }
q271342
TransferManager.copy
test
def copy(self, copy_source, bucket, key, extra_args=None, subscribers=None, source_client=None): """Copies a file in S3 :type copy_source: dict :param copy_source: The name of the source bucket, key name of the source object, and optional version ID of the source object. The dictionary format is: ``{'Bucket': 'bucket', 'Key': 'key', 'VersionId': 'id'}``. Note that the ``VersionId`` key is optional and may be omitted. :type bucket: str :param bucket: The name of the bucket to copy to :type key: str :param key: The name of the key to copy to :type extra_args: dict :param extra_args: Extra arguments that may be passed to the client operation :type subscribers: a list of subscribers :param subscribers: The list of subscribers to be invoked in the order provided based on the event emit during the process of the transfer request. :type source_client: botocore or boto3 Client :param source_client: The client to be used for operation that may happen at the source object. For example, this client is used for the head_object that determines the size of the copy. If no client is provided, the transfer manager's client is used as the client for the source object. :rtype: s3transfer.futures.TransferFuture :returns: Transfer future representing the copy """ if extra_args is None: extra_args = {} if subscribers is None: subscribers = [] if source_client is None: source_client = self._client self._validate_all_known_args(extra_args, self.ALLOWED_COPY_ARGS) call_args = CallArgs( copy_source=copy_source, bucket=bucket, key=key, extra_args=extra_args, subscribers=subscribers, source_client=source_client ) return self._submit_transfer(call_args, CopySubmissionTask)
python
{ "resource": "" }
q271343
TransferManager.delete
test
def delete(self, bucket, key, extra_args=None, subscribers=None): """Delete an S3 object. :type bucket: str :param bucket: The name of the bucket. :type key: str :param key: The name of the S3 object to delete. :type extra_args: dict :param extra_args: Extra arguments that may be passed to the DeleteObject call. :type subscribers: list :param subscribers: A list of subscribers to be invoked during the process of the transfer request. Note that the ``on_progress`` callback is not invoked during object deletion. :rtype: s3transfer.futures.TransferFuture :return: Transfer future representing the deletion. """ if extra_args is None: extra_args = {} if subscribers is None: subscribers = [] self._validate_all_known_args(extra_args, self.ALLOWED_DELETE_ARGS) call_args = CallArgs( bucket=bucket, key=key, extra_args=extra_args, subscribers=subscribers ) return self._submit_transfer(call_args, DeleteSubmissionTask)
python
{ "resource": "" }
q271344
TransferManager.shutdown
test
def shutdown(self, cancel=False, cancel_msg=''): """Shutdown the TransferManager It will wait till all transfers complete before it completely shuts down. :type cancel: boolean :param cancel: If True, calls TransferFuture.cancel() for all in-progress in transfers. This is useful if you want the shutdown to happen quicker. :type cancel_msg: str :param cancel_msg: The message to specify if canceling all in-progress transfers. """ self._shutdown(cancel, cancel, cancel_msg)
python
{ "resource": "" }
q271345
TransferCoordinatorController.cancel
test
def cancel(self, msg='', exc_type=CancelledError):
    """Cancel every in-progress transfer tracked by this controller.

    :param msg: Message forwarded to each cancelled transfer coordinator.
    :param exc_type: Exception type to set for the cancellation.
    """
    for coordinator in self.tracked_transfer_coordinators:
        coordinator.cancel(msg, exc_type)
python
{ "resource": "" }
q271346
TransferCoordinatorController.wait
test
def wait(self):
    """Wait until there are no more inprogress transfers

    This will not stop when failures are encountered and not propogate any
    of these errors from failed transfers, but it can be interrupted with
    a KeyboardInterrupt.
    """
    try:
        # Sentinel stays None when there is nothing to wait on, so the
        # KeyboardInterrupt handler below can tell whether a coordinator
        # was actually in flight.
        transfer_coordinator = None
        for transfer_coordinator in self.tracked_transfer_coordinators:
            transfer_coordinator.result()
    except KeyboardInterrupt:
        logger.debug('Received KeyboardInterrupt in wait()')
        # If Keyboard interrupt is raised while waiting for
        # the result, then exit out of the wait and raise the
        # exception
        if transfer_coordinator:
            logger.debug(
                'On KeyboardInterrupt was waiting for %s',
                transfer_coordinator)
        raise
    except Exception:
        # A general exception could have been thrown because
        # of result(). We just want to ignore this and continue
        # because we at least know that the transfer coordinator
        # has completed.
        pass
python
{ "resource": "" }
q271347
UploadNonSeekableInputManager._read
test
def _read(self, fileobj, amount, truncate=True):
    """Return ``amount`` bytes, consuming buffered initial data first.

    :param fileobj: A file-like object implementing ``read``.
    :param amount: Number of bytes to return.
    :param truncate: When True, bytes consumed from the buffered initial
        data are removed from it.
    :returns: The requested bytes.
    """
    buffered = self._initial_data
    # Nothing buffered: read straight from the stream.
    if not buffered:
        return fileobj.read(amount)
    # Buffered data alone can satisfy the request.
    if amount <= len(buffered):
        chunk = buffered[:amount]
        # Drop consumed bytes so we don't hold them longer than needed.
        if truncate:
            self._initial_data = buffered[amount:]
        return chunk
    # Drain the buffer and read the remainder from the stream.
    remainder = fileobj.read(amount - len(buffered))
    if truncate:
        self._initial_data = b''
    return buffered + remainder
python
{ "resource": "" }
q271348
UploadNonSeekableInputManager._wrap_data
test
def _wrap_data(self, data, callbacks, close_callbacks):
    """Wrap raw bytes with the interrupt reader and file chunk reader.

    :param data: The bytes to wrap.
    :param callbacks: Callbacks associated with the transfer future.
    :param close_callbacks: Callbacks invoked when the wrapper is closed.
    :returns: The fully wrapped, file-like chunk reader.
    """
    size = len(data)
    stream = self._wrap_fileobj(six.BytesIO(data))
    return self._osutil.open_file_chunk_reader_from_fileobj(
        fileobj=stream, chunk_size=size, full_file_size=size,
        callbacks=callbacks, close_callbacks=close_callbacks)
python
{ "resource": "" }
q271349
UploadSubmissionTask._get_upload_input_manager_cls
test
def _get_upload_input_manager_cls(self, transfer_future):
    """Pick the UploadInputManager subclass matching the input type.

    :param transfer_future: The transfer future for the request.
    :returns: The first compatible manager class for the request's fileobj.
    :raises RuntimeError: If no manager supports the input.
    """
    fileobj = transfer_future.meta.call_args.fileobj
    # Order matters: filename, then seekable stream, then raw stream.
    for candidate in (UploadFilenameInputManager,
                      UploadSeekableInputManager,
                      UploadNonSeekableInputManager):
        if candidate.is_compatible(fileobj):
            return candidate
    raise RuntimeError(
        'Input %s of type: %s is not supported.' % (
            fileobj, type(fileobj)))
python
{ "resource": "" }
q271350
TransferFuture.set_exception
test
def set_exception(self, exception):
    """Set the exception on the future.

    :raises TransferNotDoneError: If the transfer is not yet complete.
    """
    if self.done():
        self._coordinator.set_exception(exception, override=True)
    else:
        raise TransferNotDoneError(
            'set_exception can only be called once the transfer is '
            'complete.')
python
{ "resource": "" }
q271351
TransferCoordinator.set_result
test
def set_result(self, result):
    """Record a successful result for the TransferFuture.

    Always overwrites prior state: this runs in the single final task of
    a transfer, so reaching it means the transfer succeeded even if a
    cancellation raced in earlier.
    """
    with self._lock:
        self._exception, self._result = None, result
        self._status = 'success'
python
{ "resource": "" }
q271352
TransferCoordinator.set_exception
test
def set_exception(self, exception, override=False):
    """Mark the TransferFuture as failed with ``exception``.

    :param exception: The exception that caused the transfer to fail.
    :param override: When True, overwrite any existing terminal state.
    """
    with self._lock:
        # A finished transfer keeps its state unless explicitly overridden.
        if override or not self.done():
            self._exception = exception
            self._status = 'failed'
python
{ "resource": "" }
q271353
TransferCoordinator.result
test
def result(self): """Waits until TransferFuture is done and returns the result If the TransferFuture succeeded, it will return the result. If the TransferFuture failed, it will raise the exception associated to the failure. """ # Doing a wait() with no timeout cannot be interrupted in python2 but # can be interrupted in python3 so we just wait with the largest # possible value integer value, which is on the scale of billions of # years... self._done_event.wait(MAXINT) # Once done waiting, raise an exception if present or return the # final result. if self._exception: raise self._exception return self._result
python
{ "resource": "" }
q271354
TransferCoordinator.cancel
test
def cancel(self, msg='', exc_type=CancelledError):
    """Cancels the TransferFuture

    :param msg: The message to attach to the cancellation
    :param exc_type: The type of exception to set for the cancellation
    """
    with self._lock:
        # Cancellation is a no-op for transfers already in a terminal
        # state.
        if not self.done():
            should_announce_done = False
            logger.debug('%s cancel(%s) called', self, msg)
            self._exception = exc_type(msg)
            if self._status == 'not-started':
                # A transfer that never started has no task that will
                # ever call announce_done() for it, so announce here.
                should_announce_done = True
            self._status = 'cancelled'
            if should_announce_done:
                self.announce_done()
python
{ "resource": "" }
q271355
TransferCoordinator.submit
test
def submit(self, executor, task, tag=None):
    """Submits a task to a provided executor

    :type executor: s3transfer.futures.BoundedExecutor
    :param executor: The executor to submit the callable to

    :type task: s3transfer.tasks.Task
    :param task: The task to submit to the executor

    :type tag: s3transfer.futures.TaskTag
    :param tag: A tag to associate to the submitted task

    :rtype: concurrent.futures.Future
    :returns: A future representing the submitted task
    """
    # Pass lazy %-style arguments to the logger (instead of eagerly
    # formatting with ``%``) so the message is only built when debug
    # logging is actually enabled.
    logger.debug(
        "Submitting task %s to executor %s for transfer request: %s.",
        task, executor, self.transfer_id)
    future = executor.submit(task, tag=tag)
    # Add this created future to the list of associated future just
    # in case it is needed during cleanups.
    self.add_associated_future(future)
    # Stop tracking the future once the task completes.
    future.add_done_callback(
        FunctionContainer(self.remove_associated_future, future))
    return future
python
{ "resource": "" }
q271356
TransferCoordinator.add_done_callback
test
def add_done_callback(self, function, *args, **kwargs):
    """Register a callback to run once the transfer is done."""
    callback = FunctionContainer(function, *args, **kwargs)
    with self._done_callbacks_lock:
        self._done_callbacks.append(callback)
python
{ "resource": "" }
q271357
TransferCoordinator.add_failure_cleanup
test
def add_failure_cleanup(self, function, *args, **kwargs):
    """Register a cleanup callback to run if the transfer fails."""
    cleanup = FunctionContainer(function, *args, **kwargs)
    with self._failure_cleanups_lock:
        self._failure_cleanups.append(cleanup)
python
{ "resource": "" }
q271358
TransferCoordinator.announce_done
test
def announce_done(self):
    """Announce that future is done running and run associated callbacks

    This will run any failure cleanups if the transfer failed if not
    they have not been run, allows the result() to be unblocked, and will
    run any done callbacks associated to the TransferFuture if they have
    not already been ran.
    """
    # Any terminal state other than 'success' (failed/cancelled) counts
    # as a failure for cleanup purposes.
    if self.status != 'success':
        self._run_failure_cleanups()
    # Unblock result() before invoking done callbacks.
    self._done_event.set()
    self._run_done_callbacks()
python
{ "resource": "" }
q271359
BoundedExecutor.submit
test
def submit(self, task, tag=None, block=True):
    """Submit a task, gated by the executor's semaphore(s).

    :param task: The task whose ``__call__`` will be run.
    :param tag: Optional tag selecting a tag-specific semaphore instead
        of the default one.
    :param block: When True, wait for capacity; otherwise raise if the
        task cannot be submitted immediately.
    :returns: The future associated to the submitted task.
    """
    # Tagged tasks are throttled by their own dedicated semaphore.
    semaphore = self._tag_semaphores[tag] if tag else self._semaphore
    acquire_token = semaphore.acquire(task.transfer_id, block)
    # Hand the task to the wrapped executor, making sure the semaphore
    # slot is released once the task completes.
    future = ExecutorFuture(self._executor.submit(task))
    future.add_done_callback(
        FunctionContainer(
            semaphore.release, task.transfer_id, acquire_token))
    return future
python
{ "resource": "" }
q271360
ExecutorFuture.add_done_callback
test
def add_done_callback(self, fn):
    """Register ``fn`` to run when the future completes.

    Unlike concurrent.futures.Future.add_done_callback, ``fn`` takes no
    arguments; the future argument supplied by the underlying executor
    is discarded by the adapter below.
    """
    self._future.add_done_callback(lambda _future: fn())
python
{ "resource": "" }
q271361
S3Transfer.upload_file
test
def upload_file(self, filename, bucket, key, callback=None, extra_args=None):
    """Upload a file to an S3 object.

    Variants have also been injected into S3 client, Bucket and Object.
    You don't have to use S3Transfer.upload_file() directly.
    """
    extra_args = {} if extra_args is None else extra_args
    self._validate_all_known_args(extra_args, self.ALLOWED_UPLOAD_ARGS)
    # Disable per-request progress callbacks while requests are being
    # created, re-enabling them afterwards.
    events = self._client.meta.events
    events.register_first('request-created.s3', disable_upload_callbacks,
                          unique_id='s3upload-callback-disable')
    events.register_last('request-created.s3', enable_upload_callbacks,
                         unique_id='s3upload-callback-enable')
    use_multipart = (self._osutil.get_file_size(filename) >=
                     self._config.multipart_threshold)
    if use_multipart:
        self._multipart_upload(filename, bucket, key, callback, extra_args)
    else:
        self._put_object(filename, bucket, key, callback, extra_args)
python
{ "resource": "" }
q271362
S3Transfer.download_file
test
def download_file(self, bucket, key, filename, extra_args=None, callback=None): """Download an S3 object to a file. Variants have also been injected into S3 client, Bucket and Object. You don't have to use S3Transfer.download_file() directly. """ # This method will issue a ``head_object`` request to determine # the size of the S3 object. This is used to determine if the # object is downloaded in parallel. if extra_args is None: extra_args = {} self._validate_all_known_args(extra_args, self.ALLOWED_DOWNLOAD_ARGS) object_size = self._object_size(bucket, key, extra_args) temp_filename = filename + os.extsep + random_file_extension() try: self._download_file(bucket, key, temp_filename, object_size, extra_args, callback) except Exception: logger.debug("Exception caught in download_file, removing partial " "file: %s", temp_filename, exc_info=True) self._osutil.remove_file(temp_filename) raise else: self._osutil.rename_file(temp_filename, filename)
python
{ "resource": "" }
q271363
ParsoPythonFile._iter_step_func_decorators
test
def _iter_step_func_decorators(self):
    """Yield (function, decorator) pairs for @step-decorated functions.

    Scans both module-level functions and methods of top-level classes.
    Only the first matching decorator per function is yielded.
    """
    # ``list(...)`` + extend instead of the original redundant
    # ``[x for x in it]`` copy comprehension.
    func_defs = list(self.py_tree.iter_funcdefs())
    for cls in self.py_tree.iter_classdefs():
        func_defs.extend(cls.iter_funcdefs())
    for func in func_defs:
        for decorator in func.get_decorators():
            # children[1] is the decorator's name node.
            if decorator.children[1].value == 'step':
                yield func, decorator
                break
python
{ "resource": "" }
q271364
ParsoPythonFile._step_decorator_args
test
def _step_decorator_args(self, decorator):
    """Return the step decorator's argument as a Python object.

    Accepts a single string or list-of-strings literal; logs an error and
    returns None for anything else.
    """
    arg_nodes = decorator.children[3:-2]
    if len(arg_nodes) != 1:
        logging.error("Decorator step accepts only one argument - %s:%d",
                      self.file_path, decorator.start_pos[0])
        return None
    try:
        step = ast.literal_eval(arg_nodes[0].get_code())
    except (ValueError, SyntaxError):
        step = None
    if isinstance(step, six.string_types+(list,)):
        return step
    logging.error(
        "Decorator step accepts either a string or a list of strings - %s:%d",
        self.file_path, decorator.start_pos[0])
    return None
python
{ "resource": "" }
q271365
ParsoPythonFile.refactor_step
test
def refactor_step(self, old_text, new_text, move_param_from_idx):
    """
    Find the step with old_text and change it to new_text.The step function
    parameters are also changed according to move_param_from_idx. Each
    entry in this list should specify parameter position from old.

    Returns a list of (span, replacement_text) diffs describing the edits.
    """
    diffs = []
    step, func = self._find_step_node(old_text)
    if step is None:
        return diffs
    step_diff = self._refactor_step_text(step, old_text, new_text)
    diffs.append(step_diff)
    # children[2] is the function's parameter-list node (incl. parens).
    params_list_node = func.children[2]
    moved_params = self._move_param_nodes(
        params_list_node.children, move_param_from_idx)
    # Identity check: _move_param_nodes presumably returns the original
    # list when no reordering is needed, so only record a diff when the
    # parameter list actually changed.
    if params_list_node.children is not moved_params:
        # Record original parameter list span excluding braces
        params_span = self._span_from_pos(
            params_list_node.children[0].end_pos,
            params_list_node.children[-1].start_pos)
        params_list_node.children = moved_params
        # Get code for moved paramters excluding braces
        param_code = ''.join(p.get_code() for p in moved_params[1:-1])
        diffs.append((params_span, param_code))
    return diffs
python
{ "resource": "" }
q271366
RedbaronPythonFile._iter_step_func_decorators
test
def _iter_step_func_decorators(self):
    """Yield (function, decorator) pairs for @step-decorated functions."""
    for func_node in self.py_tree.find_all('def'):
        for dec in func_node.decorators:
            if dec.name.value == 'step':
                yield func_node, dec
                break
python
{ "resource": "" }
q271367
RedbaronPythonFile._step_decorator_args
test
def _step_decorator_args(self, decorator):
    """
    Get arguments passed to step decorators
    converted to python objects.
    """
    args = decorator.call.value
    step = None
    if len(args) == 1:
        try:
            step = args[0].value.to_python()
        except (ValueError, SyntaxError):
            pass
        if isinstance(step, six.string_types + (list,)):
            return step
        # NOTE(review): the backslash continuation in this literal embeds
        # the line break position into the logged message -- presumably
        # unintended; confirm before reformatting, since changing it
        # alters the runtime string.
        logging.error("Decorator step accepts either a string or a list of \
strings - %s", self.file_path)
    else:
        logging.error("Decorator step accepts only one argument - %s",
                      self.file_path)
python
{ "resource": "" }
q271368
RedbaronPythonFile.refactor_step
test
def refactor_step(self, old_text, new_text, move_param_from_idx):
    """
    Find the step with old_text and change it to new_text. The step
    function parameters are also changed according to
    move_param_from_idx. Each entry in this list should specify
    parameter position from old

    Returns a list of (span, replacement_text) diffs describing the edits.
    """
    diffs = []
    step, func = self._find_step_node(old_text)
    if step is None:
        return diffs
    step_diff = self._refactor_step_text(step, old_text, new_text)
    diffs.append(step_diff)
    # _move_params presumably returns the original arguments object when
    # no reordering is required; only emit a diff when it changed.
    moved_params = self._move_params(func.arguments, move_param_from_idx)
    if func.arguments is not moved_params:
        params_span = self._span_for_node(func.arguments, False)
        func.arguments = moved_params
        diffs.append((params_span, func.arguments.dumps()))
    return diffs
python
{ "resource": "" }
q271369
PythonFile.select_python_parser
test
def select_python_parser(parser=None):
    """Choose the parser backend used for loading and refactoring steps.

    Passing ``'redbaron'`` (or setting the ``GETGAUGE_USE_0_3_3_PARSER``
    property in ``<PROJECT_DIR>/env/default/python.properties``) selects
    the legacy v0.3.3 redbaron engine, which does not understand Python 3
    syntax; otherwise the parso backend is used. The redbaron option and
    this property are slated for removal in future releases.
    """
    use_legacy = (parser == 'redbaron'
                  or bool(os.environ.get('GETGAUGE_USE_0_3_3_PARSER')))
    PythonFile.Class = RedbaronPythonFile if use_legacy else ParsoPythonFile
python
{ "resource": "" }
q271370
TeamMembershipsAPI.list
test
def list(self, teamId, max=None, **request_parameters):
    """List team memberships for a team, by ID.

    Returns a lazy generator that pages through all memberships returned
    by Webex Teams, requesting additional pages as needed.

    :param teamId: List team memberships for a team, by ID.
    :param max: Maximum number of items per request.
    :param request_parameters: Additional request parameters.
    :raises TypeError: If the parameter types are incorrect.
    :raises ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(teamId, basestring, may_be_none=False)
    check_type(max, int)
    params = dict_from_items_with_values(
        request_parameters, teamId=teamId, max=max)
    # Page through the API results, yielding model objects as we go.
    for item in self._session.get_items(API_ENDPOINT, params=params):
        yield self._object_factory(OBJECT_TYPE, item)
python
{ "resource": "" }
q271371
TeamMembershipsAPI.create
test
def create(self, teamId, personId=None, personEmail=None, isModerator=False, **request_parameters):
    """Add someone to a team by Person ID or email address.

    :param teamId: The team ID.
    :param personId: The person ID.
    :param personEmail: The email address of the person.
    :param isModerator: Set to True to make the person a team moderator.
    :param request_parameters: Additional request parameters.
    :returns: A TeamMembership object for the created membership.
    :raises TypeError: If the parameter types are incorrect.
    :raises ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(teamId, basestring, may_be_none=False)
    check_type(personId, basestring)
    check_type(personEmail, basestring)
    check_type(isModerator, bool)
    post_data = dict_from_items_with_values(
        request_parameters, teamId=teamId, personId=personId,
        personEmail=personEmail, isModerator=isModerator)
    json_data = self._session.post(API_ENDPOINT, json=post_data)
    return self._object_factory(OBJECT_TYPE, json_data)
python
{ "resource": "" }
q271372
TeamMembershipsAPI.update
test
def update(self, membershipId, isModerator=None, **request_parameters):
    """Update a team membership, by ID.

    :param membershipId: The team membership ID.
    :param isModerator: Set to True to make the person a team moderator.
    :param request_parameters: Additional request parameters.
    :returns: A TeamMembership object with the updated details.
    :raises TypeError: If the parameter types are incorrect.
    :raises ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(membershipId, basestring, may_be_none=False)
    check_type(isModerator, bool)
    put_data = dict_from_items_with_values(
        request_parameters, isModerator=isModerator)
    json_data = self._session.put(
        '{}/{}'.format(API_ENDPOINT, membershipId), json=put_data)
    return self._object_factory(OBJECT_TYPE, json_data)
python
{ "resource": "" }
q271373
TeamMembershipsAPI.delete
test
def delete(self, membershipId):
    """Delete a team membership, by ID.

    :param membershipId: The team membership ID.
    :raises TypeError: If the parameter types are incorrect.
    :raises ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(membershipId, basestring, may_be_none=False)
    self._session.delete('{}/{}'.format(API_ENDPOINT, membershipId))
python
{ "resource": "" }
q271374
get_catfact
test
def get_catfact():
    """Return a random cat fact from catfact.ninja as a string.

    Other APIs (Soundhound, Google, IBM Watson, ...) could be wired in
    here to extend the bot's functionality.
    """
    response = requests.get(CAT_FACTS_URL, verify=False)
    response.raise_for_status()
    return response.json()['fact']
python
{ "resource": "" }
q271375
webhook.POST
test
def POST(self): """Respond to inbound webhook JSON HTTP POSTs from Webex Teams.""" # Get the POST data sent from Webex Teams json_data = web.data() print("\nWEBHOOK POST RECEIVED:") print(json_data, "\n") # Create a Webhook object from the JSON data webhook_obj = Webhook(json_data) # Get the room details room = api.rooms.get(webhook_obj.data.roomId) # Get the message details message = api.messages.get(webhook_obj.data.id) # Get the sender's details person = api.people.get(message.personId) print("NEW MESSAGE IN ROOM '{}'".format(room.title)) print("FROM '{}'".format(person.displayName)) print("MESSAGE '{}'\n".format(message.text)) # This is a VERY IMPORTANT loop prevention control step. # If you respond to all messages... You will respond to the messages # that the bot posts and thereby create a loop condition. me = api.people.me() if message.personId == me.id: # Message was sent by me (bot); do not respond. return 'OK' else: # Message was sent by someone else; parse message and respond. if "/CAT" in message.text: print("FOUND '/CAT'") # Get a cat fact cat_fact = get_catfact() print("SENDING CAT FACT '{}'".format(cat_fact)) # Post the fact to the room where the request was received api.messages.create(room.id, text=cat_fact) return 'OK'
python
{ "resource": "" }
q271376
MembershipsAPI.list
test
def list(self, roomId=None, personId=None, personEmail=None, max=None, **request_parameters):
    """List room memberships.

    By default lists memberships for rooms the authenticated user belongs
    to. Use ``roomId`` to target a room, and ``personId`` or
    ``personEmail`` to filter the results. Returns a lazy generator that
    pages through all memberships returned by Webex Teams.

    :param roomId: Limit results to a specific room, by ID.
    :param personId: Limit results to a specific person, by ID.
    :param personEmail: Limit results to a specific person, by email.
    :param max: Maximum number of items per request.
    :param request_parameters: Additional request parameters.
    :raises TypeError: If the parameter types are incorrect.
    :raises ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(roomId, basestring)
    check_type(personId, basestring)
    check_type(personEmail, basestring)
    check_type(max, int)
    params = dict_from_items_with_values(
        request_parameters, roomId=roomId, personId=personId,
        personEmail=personEmail, max=max)
    # Page through the API results, yielding model objects as we go.
    for item in self._session.get_items(API_ENDPOINT, params=params):
        yield self._object_factory(OBJECT_TYPE, item)
python
{ "resource": "" }
q271377
MembershipsAPI.delete
test
def delete(self, membershipId):
    """Delete a membership, by ID.

    :param membershipId: The membership ID.
    :raises TypeError: If the parameter types are incorrect.
    :raises ApiError: If the Webex Teams cloud returns an error.
    """
    # membershipId is required; reject None explicitly for consistency
    # with TeamMembershipsAPI.delete (which passes may_be_none=False).
    check_type(membershipId, basestring, may_be_none=False)
    # API request
    self._session.delete(API_ENDPOINT + '/' + membershipId)
python
{ "resource": "" }
q271378
validate_base_url
test
def validate_base_url(base_url):
    """Return ``base_url`` if it has both a scheme and a network location.

    :raises ValueError: If either component is missing.
    """
    parsed_url = urllib.parse.urlparse(base_url)
    if not (parsed_url.scheme and parsed_url.netloc):
        raise ValueError(
            "base_url must contain a valid scheme (protocol "
            "specifier) and network location (hostname)"
        )
    return parsed_url.geturl()
python
{ "resource": "" }
q271379
is_web_url
test
def is_web_url(string):
    """Check to see if string is a validly-formatted web url."""
    assert isinstance(string, basestring)
    parsed_url = urllib.parse.urlparse(string)
    # Tuple membership instead of an or-chain, and return a real bool
    # rather than leaking the netloc string as the truthy value.
    return (parsed_url.scheme.lower() in ('http', 'https')
            and bool(parsed_url.netloc))
python
{ "resource": "" }
q271380
open_local_file
test
def open_local_file(file_path):
    """Open ``file_path`` for binary reading and return an EncodableFile."""
    assert isinstance(file_path, basestring)
    assert is_local_file(file_path)
    file_name = os.path.basename(file_path)
    # Fall back to text/plain when the type cannot be guessed from the
    # file name.
    content_type = mimetypes.guess_type(file_name)[0] or 'text/plain'
    return EncodableFile(
        file_name=file_name,
        file_object=open(file_path, 'rb'),
        content_type=content_type,
    )
python
{ "resource": "" }
q271381
check_type
test
def check_type(o, acceptable_types, may_be_none=True):
    """Object is an instance of one of the acceptable types or None.

    Args:
        o: The object to be inspected.
        acceptable_types: A type or tuple of acceptable types.
        may_be_none(bool): Whether or not the object may be None.

    Raises:
        TypeError: If the object is None and may_be_none=False, or if the
            object is not an instance of one of the acceptable types.
    """
    if not isinstance(acceptable_types, tuple):
        acceptable_types = (acceptable_types,)

    # Guard clauses replace the original empty ``pass`` branches.
    if may_be_none and o is None:
        # Object is None, and that is OK!
        return
    if isinstance(o, acceptable_types):
        # Object is an instance of an acceptable type.
        return

    # Object is something else; the message lists the accepted types.
    error_message = (
        "We were expecting to receive an instance of one of the following "
        "types: {types}{none}; but instead we received {o} which is a "
        "{o_type}.".format(
            types=", ".join([repr(t.__name__) for t in acceptable_types]),
            none="or 'None'" if may_be_none else "",
            o=o,
            o_type=repr(type(o).__name__)
        )
    )
    raise TypeError(error_message)
python
{ "resource": "" }
q271382
dict_from_items_with_values
test
def dict_from_items_with_values(*dictionaries, **items):
    """Merge the given dicts and keyword items, dropping None values.

    Later sources win for duplicate keys; entries whose value is None are
    skipped entirely (they never overwrite an earlier non-None value).

    Returns:
        dict: All items whose value is not None.
    """
    merged = {}
    for source in dictionaries + (items,):
        merged.update(
            (key, value) for key, value in source.items()
            if value is not None)
    return merged
python
{ "resource": "" }
q271383
check_response_code
test
def check_response_code(response, expected_response_code):
    """Raise unless the response has the expected HTTP status code.

    Args:
        response(requests.response): The response object returned by a
            request using the requests package.
        expected_response_code(int): The expected HTTP status code.

    Raises:
        RateLimitError: If the response indicates rate limiting.
        ApiError: If the status code does not match for any other reason.
    """
    status = response.status_code
    if status == expected_response_code:
        return
    if status == RATE_LIMIT_RESPONSE_CODE:
        raise RateLimitError(response)
    raise ApiError(response)
python
{ "resource": "" }
q271384
json_dict
test
def json_dict(json_data):
    """Coerce a dictionary or JSON string to a dictionary.

    Args:
        json_data(dict, str): Input JSON object.

    Returns:
        A Python dictionary with the contents of the JSON object.

    Raises:
        TypeError: If the input object is not a dictionary or string.
    """
    if isinstance(json_data, dict):
        return json_data
    if isinstance(json_data, basestring):
        # Preserve key order from the JSON document.
        return json.loads(json_data, object_hook=OrderedDict)
    raise TypeError(
        "'json_data' must be a dictionary or valid JSON string; "
        "received: {!r}".format(json_data)
    )
python
{ "resource": "" }
q271385
WebexTeamsDateTime.strptime
test
def strptime(cls, date_string, format=WEBEX_TEAMS_DATETIME_FORMAT):
    """strptime with the Webex Teams DateTime format as the default.

    The parsed value is made timezone-aware by attaching the Zulu (UTC)
    timezone, since naive datetimes cannot be safely compared or
    converted.
    """
    return super(WebexTeamsDateTime, cls).strptime(
        date_string, format
    ).replace(tzinfo=ZuluTimeZone())
python
{ "resource": "" }
q271386
RoomsAPI.list
test
def list(self, teamId=None, type=None, sortBy=None, max=None,
         **request_parameters):
    """List rooms.

    By default, lists rooms to which the authenticated user belongs.

    This method supports Webex Teams's implementation of RFC5988 Web
    Linking to provide pagination support. It returns a generator
    container that incrementally yields all rooms returned by the query.
    The generator will automatically request additional 'pages' of
    responses from Webex as needed until all responses have been returned.
    The container makes the generator safe for reuse. A new API call will
    be made, using the same parameters that were specified when the
    generator was created, every time a new iterator is requested from the
    container.

    Args:
        teamId(basestring): Limit the rooms to those associated with a
            team, by ID.
        type(basestring): 'direct' returns all 1-to-1 rooms. `group`
            returns all group rooms. If not specified or values not
            matched, will return all room types.
        sortBy(basestring): Sort results by room ID (`id`), most recent
            activity (`lastactivity`), or most recently created
            (`created`).
        max(int): Limit the maximum number of items returned from the
            Webex Teams service per request.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        GeneratorContainer: A GeneratorContainer which, when iterated,
            yields the rooms returned by the Webex Teams query.

    Raises:
        TypeError: If the parameter types are incorrect.
        ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(teamId, basestring)
    check_type(type, basestring)
    check_type(sortBy, basestring)
    check_type(max, int)

    query_params = dict_from_items_with_values(
        request_parameters,
        teamId=teamId,
        type=type,
        sortBy=sortBy,
        max=max,
    )

    # Stream the paginated room items and wrap each JSON item in a Room
    for item in self._session.get_items(API_ENDPOINT, params=query_params):
        yield self._object_factory(OBJECT_TYPE, item)
python
{ "resource": "" }
q271387
RoomsAPI.create
test
def create(self, title, teamId=None, **request_parameters):
    """Create a room.

    The authenticated user is automatically added as a member of the room.

    Args:
        title(basestring): A user-friendly name for the room.
        teamId(basestring): The team ID with which this room is
            associated.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        Room: A Room with the details of the created room.

    Raises:
        TypeError: If the parameter types are incorrect.
        ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(title, basestring)
    check_type(teamId, basestring)

    payload = dict_from_items_with_values(
        request_parameters,
        title=title,
        teamId=teamId,
    )

    # POST the new room and wrap the response JSON in a Room object
    response_json = self._session.post(API_ENDPOINT, json=payload)
    return self._object_factory(OBJECT_TYPE, response_json)
python
{ "resource": "" }
q271388
RoomsAPI.update
test
def update(self, roomId, title=None, **request_parameters):
    """Update details for a room, by ID.

    Args:
        roomId(basestring): The room ID.
        title(basestring): A user-friendly name for the room.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        Room: A Room object with the updated Webex Teams room details.

    Raises:
        TypeError: If the parameter types are incorrect.
        ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(roomId, basestring, may_be_none=False)
    # Fixed: the second check previously re-validated `roomId`; it should
    # validate the optional `title` parameter (mirrors WebhooksAPI.update).
    check_type(title, basestring)

    put_data = dict_from_items_with_values(
        request_parameters,
        title=title,
    )

    # API request
    json_data = self._session.put(API_ENDPOINT + '/' + roomId, json=put_data)

    # Return a room object created from the response JSON data
    return self._object_factory(OBJECT_TYPE, json_data)
python
{ "resource": "" }
q271389
RoomsAPI.delete
test
def delete(self, roomId):
    """Delete a room.

    Args:
        roomId(basestring): The ID of the room to be deleted.

    Raises:
        TypeError: If the parameter types are incorrect.
        ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(roomId, basestring, may_be_none=False)

    # DELETE the room resource; the API returns no content on success
    self._session.delete('{}/{}'.format(API_ENDPOINT, roomId))
python
{ "resource": "" }
q271390
LicensesAPI.list
test
def list(self, orgId=None, **request_parameters):
    """List all licenses for a given organization.

    If no orgId is specified, the default is the organization of the
    authenticated user.

    Args:
        orgId(basestring): Specify the organization, by ID.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        GeneratorContainer: A GeneratorContainer which, when iterated,
            yields the licenses returned by the Webex Teams query.

    Raises:
        TypeError: If the parameter types are incorrect.
        ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(orgId, basestring)

    query_params = dict_from_items_with_values(
        request_parameters,
        orgId=orgId,
    )

    # Stream the paginated license items and wrap each in a License object
    for item in self._session.get_items(API_ENDPOINT, params=query_params):
        yield self._object_factory(OBJECT_TYPE, item)
python
{ "resource": "" }
q271391
WebhookBasicPropertiesMixin.created
test
def created(self):
    """Creation date and time in ISO8601 format.

    Returns the 'created' value from the JSON data parsed via
    WebexTeamsDateTime.strptime, or None when the attribute is absent.
    """
    raw = self._json_data.get('created')
    return WebexTeamsDateTime.strptime(raw) if raw else None
python
{ "resource": "" }
q271392
_get_access_token
test
def _get_access_token():
    """Attempt to get the access token from the environment.

    Try using the current and legacy environment variables. If the access
    token is found in a legacy environment variable, raise a deprecation
    warning naming that variable.

    Returns:
        The access token found in the environment (str), or None.
    """
    access_token = os.environ.get(ACCESS_TOKEN_ENVIRONMENT_VARIABLE)
    if access_token:
        return access_token

    for legacy_variable in LEGACY_ACCESS_TOKEN_ENVIRONMENT_VARIABLES:
        access_token = os.environ.get(legacy_variable)
        if access_token:
            # Fixed: the warning previously interpolated the token *value*
            # (`legacy=access_token`), which both produced a nonsensical
            # message and leaked the secret token into warning output.
            # Interpolate the legacy variable *name* instead.
            env_var_deprecation_warning = PendingDeprecationWarning(
                "Use of the `{legacy}` environment variable will be "
                "deprecated in the future.  Please update your "
                "environment(s) to use the new `{new}` environment "
                "variable.".format(
                    legacy=legacy_variable,
                    new=ACCESS_TOKEN_ENVIRONMENT_VARIABLE,
                )
            )
            warnings.warn(env_var_deprecation_warning)
            return access_token
python
{ "resource": "" }
q271393
WebhooksAPI.create
test
def create(self, name, targetUrl, resource, event, filter=None, secret=None,
           **request_parameters):
    """Create a webhook.

    Args:
        name(basestring): A user-friendly name for this webhook.
        targetUrl(basestring): The URL that receives POST requests for
            each event.
        resource(basestring): The resource type for the webhook.
        event(basestring): The event type for the webhook.
        filter(basestring): The filter that defines the webhook scope.
        secret(basestring): The secret used to generate payload signature.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        Webhook: A Webhook object with the details of the created webhook.

    Raises:
        TypeError: If the parameter types are incorrect.
        ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(name, basestring, may_be_none=False)
    check_type(targetUrl, basestring, may_be_none=False)
    check_type(resource, basestring, may_be_none=False)
    check_type(event, basestring, may_be_none=False)
    check_type(filter, basestring)
    check_type(secret, basestring)

    payload = dict_from_items_with_values(
        request_parameters,
        name=name,
        targetUrl=targetUrl,
        resource=resource,
        event=event,
        filter=filter,
        secret=secret,
    )

    # POST the new webhook and wrap the response JSON in a Webhook object
    response_json = self._session.post(API_ENDPOINT, json=payload)
    return self._object_factory(OBJECT_TYPE, response_json)
python
{ "resource": "" }
q271394
WebhooksAPI.update
test
def update(self, webhookId, name=None, targetUrl=None,
           **request_parameters):
    """Update a webhook, by ID.

    Args:
        webhookId(basestring): The webhook ID.
        name(basestring): A user-friendly name for this webhook.
        targetUrl(basestring): The URL that receives POST requests for
            each event.
        **request_parameters: Additional request parameters (provides
            support for parameters that may be added in the future).

    Returns:
        Webhook: A Webhook object with the updated Webex Teams webhook
            details.

    Raises:
        TypeError: If the parameter types are incorrect.
        ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(webhookId, basestring, may_be_none=False)
    check_type(name, basestring)
    check_type(targetUrl, basestring)

    payload = dict_from_items_with_values(
        request_parameters,
        name=name,
        targetUrl=targetUrl,
    )

    # PUT the updates and wrap the response JSON in a Webhook object
    response_json = self._session.put(API_ENDPOINT + '/' + webhookId,
                                      json=payload)
    return self._object_factory(OBJECT_TYPE, response_json)
python
{ "resource": "" }
q271395
WebhooksAPI.delete
test
def delete(self, webhookId):
    """Delete a webhook, by ID.

    Args:
        webhookId(basestring): The ID of the webhook to be deleted.

    Raises:
        TypeError: If the parameter types are incorrect.
        ApiError: If the Webex Teams cloud returns an error.
    """
    check_type(webhookId, basestring, may_be_none=False)

    # DELETE the webhook resource; the API returns no content on success
    self._session.delete('{}/{}'.format(API_ENDPOINT, webhookId))
python
{ "resource": "" }
q271396
_fix_next_url
test
def _fix_next_url(next_url): """Remove max=null parameter from URL. Patch for Webex Teams Defect: 'next' URL returned in the Link headers of the responses contain an errant 'max=null' parameter, which causes the next request (to this URL) to fail if the URL is requested as-is. This patch parses the next_url to remove the max=null parameter. Args: next_url(basestring): The 'next' URL to be parsed and cleaned. Returns: basestring: The clean URL to be used for the 'next' request. Raises: AssertionError: If the parameter types are incorrect. ValueError: If 'next_url' does not contain a valid API endpoint URL (scheme, netloc and path). """ next_url = str(next_url) parsed_url = urllib.parse.urlparse(next_url) if not parsed_url.scheme or not parsed_url.netloc or not parsed_url.path: raise ValueError( "'next_url' must be a valid API endpoint URL, minimally " "containing a scheme, netloc and path." ) if parsed_url.query: query_list = parsed_url.query.split('&') if 'max=null' in query_list: query_list.remove('max=null') warnings.warn("`max=null` still present in next-URL returned " "from Webex Teams", RuntimeWarning) new_query = '&'.join(query_list) parsed_url = list(parsed_url) parsed_url[4] = new_query return urllib.parse.urlunparse(parsed_url)
python
{ "resource": "" }
q271397
RestSession.wait_on_rate_limit
test
def wait_on_rate_limit(self, value):
    """Enable or disable automatic rate-limit handling.

    Args:
        value(bool): Whether rate-limited requests should be handled
            automatically by this session.

    Raises:
        TypeError: If `value` is not a bool.
    """
    check_type(value, bool, may_be_none=False)
    # Persist the flag consumed by the request/retry machinery
    self._wait_on_rate_limit = value
python
{ "resource": "" }
q271398
RestSession.update_headers
test
def update_headers(self, headers):
    """Merge `headers` into the HTTP headers used for this session.

    Note:
        The provided key-value pairs are merged into the current session
        headers — new keys are added and values of existing keys are
        overwritten. The session headers are NOT replaced wholesale by the
        provided dictionary.

    Args:
        headers(dict): Updates to the current session headers.

    Raises:
        TypeError: If `headers` is not a dict.
    """
    check_type(headers, dict, may_be_none=False)
    self._req_session.headers.update(headers)
python
{ "resource": "" }
q271399
RestSession.abs_url
test
def abs_url(self, url):
    """Given a relative or absolute URL; return an absolute URL.

    Args:
        url(basestring): A relative or absolute URL.

    Returns:
        str: An absolute URL.
    """
    parts = urllib.parse.urlparse(url)
    if parts.scheme or parts.netloc:
        # Already an absolute URL; return it unchanged
        return url
    # Relative URL; resolve against the session's base URL
    return urllib.parse.urljoin(str(self.base_url), str(url))
python
{ "resource": "" }