_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q271500
QueueProcessor._handle_auth
test
def _handle_auth(self, dtype, data, ts): """Handles authentication responses. :param dtype: :param data: :param ts: :return: """ # Contains keys status, chanId, userId, caps if dtype == 'unauth': raise NotImplementedError channel_id = data.pop('chanId') user_id = data.pop('userId') identifier = ('auth', user_id) self.channel_handlers[identifier] = channel_id self.channel_directory[identifier] = channel_id self.channel_directory[channel_id] = identifier
python
{ "resource": "" }
q271501
QueueProcessor._handle_conf
test
def _handle_conf(self, dtype, data, ts): """Handles configuration messages. :param dtype: :param data: :param ts: :return: """ self.log.debug("_handle_conf: %s - %s - %s", dtype, data, ts) self.log.info("Configuration accepted: %s", dtype) return
python
{ "resource": "" }
q271502
QueueProcessor.update_timestamps
test
def update_timestamps(self, chan_id, ts):
    """Record *ts* as the time of the last update seen on channel *chan_id*.

    :param chan_id: int, channel id as assigned by the server
    :param ts: timestamp of the most recent message on the channel
    :return: None
    """
    # Bug fix: the original wrapped this assignment in ``try/except
    # KeyError`` -- but assigning to a dict key can never raise KeyError,
    # so the handler was dead code (and its own unguarded
    # ``self.channel_directory[chan_id]`` lookup could itself raise).
    # A plain assignment is equivalent and honest.
    self.last_update[chan_id] = ts
python
{ "resource": "" }
q271503
BtfxWss.reset
test
def reset(self):
    """Reset the client.

    Reconnects the underlying websocket, blocks until the connection is
    re-established, then replays every stored channel configuration.

    :return: None
    """
    self.conn.reconnect()
    # Poll (1s interval) until the connection event is set again.
    while not self.conn.connected.is_set():
        log.info("reset(): Waiting for connection to be set up..")
        time.sleep(1)
    # Re-send all subscriptions/configs recorded before the reset.
    for config in self.channel_configs.values():
        self.conn.send(**config)
python
{ "resource": "" }
q271504
BtfxWss.candles
test
def candles(self, pair, timeframe=None):
    """Return a queue containing all received candles data.

    :param pair: str, symbol pair to request data for
    :param timeframe: str, candle timeframe (defaults to '1m')
    :return: Queue()
    """
    tf = timeframe if timeframe else '1m'
    return self.queue_processor.candles[('candles', pair, tf)]
python
{ "resource": "" }
q271505
BtfxWss.config
test
def config(self, decimals_as_strings=True, ts_as_dates=False, sequencing=False, ts=False, **kwargs):
    """Send configuration to websocket server

    :param decimals_as_strings: bool, turn on/off decimals as strings
    :param ts_as_dates: bool, request timestamps as dates instead
    :param sequencing: bool, turn on sequencing
    :param ts: bool, append a timestamp to every array sent by the server
    :param kwargs: extra fields merged into the conf payload
    :return: None
    """
    # Bitfinex conf flags form a bitmask; sum the requested options.
    flag_bits = ((8 if decimals_as_strings else 0)
                 + (32 if ts_as_dates else 0)
                 + (32768 if ts else 0)
                 + (65536 if sequencing else 0))
    payload = {'event': 'conf', 'flags': flag_bits}
    payload.update(kwargs)
    # Remember the config so it can be replayed after a reconnect.
    self.conn.bitfinex_config = payload
    self.conn.send(**payload)
python
{ "resource": "" }
q271506
BtfxWss.subscribe_to_ticker
test
def subscribe_to_ticker(self, pair, **kwargs):
    """Subscribe to the passed pair's ticker channel.

    :param pair: str, symbol pair to request data for
    :param kwargs: extra arguments forwarded with the subscription request
    :return: None
    """
    self._subscribe('ticker', ('ticker', pair), symbol=pair, **kwargs)
python
{ "resource": "" }
q271507
BtfxWss.unsubscribe_from_ticker
test
def unsubscribe_from_ticker(self, pair, **kwargs):
    """Unsubscribe from the passed pair's ticker channel.

    :param pair: str, symbol pair to request data for
    :param kwargs: extra arguments forwarded with the unsubscription request
    :return: None
    """
    self._unsubscribe('ticker', ('ticker', pair), symbol=pair, **kwargs)
python
{ "resource": "" }
q271508
BtfxWss.subscribe_to_order_book
test
def subscribe_to_order_book(self, pair, **kwargs):
    """Subscribe to the passed pair's order book channel.

    :param pair: str, symbol pair to request data for
    :param kwargs: extra arguments forwarded with the subscription request
    :return: None
    """
    self._subscribe('book', ('book', pair), symbol=pair, **kwargs)
python
{ "resource": "" }
q271509
BtfxWss.unsubscribe_from_order_book
test
def unsubscribe_from_order_book(self, pair, **kwargs):
    """Unsubscribe from the passed pair's order book channel.

    :param pair: str, symbol pair to request data for
    :param kwargs: extra arguments forwarded with the unsubscription request
    :return: None
    """
    self._unsubscribe('book', ('book', pair), symbol=pair, **kwargs)
python
{ "resource": "" }
q271510
BtfxWss.subscribe_to_raw_order_book
test
def subscribe_to_raw_order_book(self, pair, prec=None, **kwargs):
    """Subscribe to the passed pair's raw order book channel.

    :param pair: str, symbol pair to request data for
    :param prec: str, precision level; defaults to 'R0' (raw book)
    :param kwargs: extra arguments forwarded with the subscription request
    :return: None
    """
    precision = prec if prec is not None else 'R0'
    self._subscribe('book', ('raw_book', pair), pair=pair,
                    prec=precision, **kwargs)
python
{ "resource": "" }
q271511
BtfxWss.unsubscribe_from_raw_order_book
test
def unsubscribe_from_raw_order_book(self, pair, prec=None, **kwargs):
    """Unsubscribe from the passed pair's raw order book channel.

    :param pair: str, symbol pair to request data for
    :param prec: str, precision level; defaults to 'R0' (raw book)
    :param kwargs: extra arguments forwarded with the unsubscription request
    :return: None
    """
    precision = prec if prec is not None else 'R0'
    self._unsubscribe('book', ('raw_book', pair), pair=pair,
                      prec=precision, **kwargs)
python
{ "resource": "" }
q271512
BtfxWss.subscribe_to_trades
test
def subscribe_to_trades(self, pair, **kwargs):
    """Subscribe to the passed pair's trades channel.

    :param pair: str, symbol pair to request data for
    :param kwargs: extra arguments forwarded with the subscription request
    :return: None
    """
    self._subscribe('trades', ('trades', pair), symbol=pair, **kwargs)
python
{ "resource": "" }
q271513
BtfxWss.unsubscribe_from_trades
test
def unsubscribe_from_trades(self, pair, **kwargs):
    """Unsubscribe from the passed pair's trades channel.

    :param pair: str, symbol pair to request data for
    :param kwargs: extra arguments forwarded with the unsubscription request
    :return: None
    """
    self._unsubscribe('trades', ('trades', pair), symbol=pair, **kwargs)
python
{ "resource": "" }
q271514
BtfxWss.subscribe_to_candles
test
def subscribe_to_candles(self, pair, timeframe=None, **kwargs):
    """Subscribe to the passed pair's OHLC data channel.

    :param pair: str, symbol pair to request data for
    :param timeframe: str, one of {1m, 5m, 15m, 30m, 1h, 3h, 6h, 12h,
        1D, 7D, 14D, 1M}; defaults to '1m'
    :param kwargs: extra arguments forwarded with the subscription request
    :return: None
    """
    valid_tfs = ['1m', '5m', '15m', '30m', '1h', '3h', '6h', '12h',
                 '1D', '7D', '14D', '1M']
    if not timeframe:
        timeframe = '1m'
    elif timeframe not in valid_tfs:
        raise ValueError("timeframe must be any of %s" % valid_tfs)

    identifier = ('candles', pair, timeframe)
    # The API expects trading pairs prefixed with 't'.
    symbol = pair if pair.startswith('t') else 't' + pair
    self._subscribe('candles', identifier,
                    key='trade:' + timeframe + ':' + symbol, **kwargs)
python
{ "resource": "" }
q271515
BtfxWss.unsubscribe_from_candles
test
def unsubscribe_from_candles(self, pair, timeframe=None, **kwargs):
    """Unsubscribe from the passed pair's OHLC data channel.

    :param pair: str, symbol pair to request data for
    :param timeframe: str, one of {1m, 5m, 15m, 30m, 1h, 3h, 6h, 12h,
        1D, 7D, 14D, 1M}; defaults to '1m'
    :param kwargs: extra arguments forwarded with the unsubscription request
    :return: None
    """
    valid_tfs = ['1m', '5m', '15m', '30m', '1h', '3h', '6h', '12h',
                 '1D', '7D', '14D', '1M']
    if not timeframe:
        timeframe = '1m'
    elif timeframe not in valid_tfs:
        raise ValueError("timeframe must be any of %s" % valid_tfs)

    identifier = ('candles', pair, timeframe)
    # The API expects trading pairs prefixed with 't'.
    symbol = pair if pair.startswith('t') else 't' + pair
    self._unsubscribe('candles', identifier,
                      key='trade:' + timeframe + ':' + symbol, **kwargs)
python
{ "resource": "" }
q271516
BtfxWss.authenticate
test
def authenticate(self):
    """Authenticate with the Bitfinex API.

    Stores the credentials in ``channel_configs`` (so they are replayed
    on reset) and sends the auth request over the websocket.

    :raises ValueError: if either the API key or the secret is missing
    :return: None
    """
    # Bug fix: the original tested ``not self.key and not self.secret``,
    # so a request with only ONE credential missing slipped through;
    # authentication requires both, as the error message already stated.
    if not self.key or not self.secret:
        raise ValueError("Must supply both key and secret key for API!")
    self.channel_configs['auth'] = {'api_key': self.key, 'secret': self.secret}
    self.conn.send(api_key=self.key, secret=self.secret, auth=True)
python
{ "resource": "" }
q271517
BtfxWss.cancel_order
test
def cancel_order(self, multi=False, **order_identifiers):
    """Cancel one or multiple orders via Websocket.

    :param multi: bool, whether order_identifiers describes one order or
        several
    :param order_identifiers: identifiers for the order(s) to cancel
    :return: None
    """
    command = 'oc_multi' if multi else 'oc'
    self._send_auth_command(command, order_identifiers)
python
{ "resource": "" }
q271518
GatewayClient._onCommand
test
def _onCommand(self, client, userdata, pahoMessage):
    """
    Internal callback for device command messages; parses the source
    device from the topic string and forwards the command to the
    registered device command callback.
    """
    try:
        cmd = Command(pahoMessage, self._messageCodecs)
    except InvalidEventException as e:
        self.logger.critical(str(e))
        return
    self.logger.debug("Received device command '%s'" % (cmd.command))
    if self.commandCallback:
        self.commandCallback(cmd)
python
{ "resource": "" }
q271519
GatewayClient._onDeviceCommand
test
def _onDeviceCommand(self, client, userdata, pahoMessage):
    """
    Internal callback for gateway command messages; parses the source
    device from the topic string and forwards the command to the
    registered device command callback.
    """
    try:
        cmd = Command(pahoMessage, self._messageCodecs)
    except InvalidEventException as e:
        self.logger.critical(str(e))
        return
    self.logger.debug("Received gateway command '%s'" % (cmd.command))
    if self.deviceCommandCallback:
        self.deviceCommandCallback(cmd)
python
{ "resource": "" }
q271520
GatewayClient._onMessageNotification
test
def _onMessageNotification(self, client, userdata, pahoMessage):
    """
    Internal callback for gateway notification messages; parses the
    source device from the topic string and forwards the notification to
    the registered notification callback.
    """
    try:
        note = Notification(pahoMessage, self._messageCodecs)
    except InvalidEventException as e:
        self.logger.critical(str(e))
        return
    self.logger.debug("Received Notification")
    if self.notificationCallback:
        self.notificationCallback(note)
python
{ "resource": "" }
q271521
DeviceTypes.create
test
def create(self, deviceType):
    """
    Register one or more new device types, each request can contain a
    maximum of 512KB.

    Raises ApiException on any response other than 201 Created.
    """
    response = self._apiClient.post("api/v0002/device/types", deviceType)
    if response.status_code != 201:
        raise ApiException(response)
    return DeviceType(apiClient=self._apiClient, **response.json())
python
{ "resource": "" }
q271522
DeviceClient.publishEvent
test
def publishEvent(self, event, msgFormat, data, qos=0, on_publish=None):
    """
    Publish an event to Watson IoT Platform.

    # Parameters
    event (string): Name of this event
    msgFormat (string): Format of the data for this event
    data (dict): Data for this event
    qos (int): MQTT quality of service level to use (`0`, `1`, or `2`)
    on_publish(function): A function that will be called when receipt
        of the publication is confirmed.

    # Callback and QoS
    The use of the optional #on_publish function has different
    implications depending on the level of qos used to publish the event:
    - qos 0: the client has asynchronously begun to send the event
    - qos 1 and 2: the client has confirmation of delivery from the platform
    """
    eventTopic = "iot-2/evt/{event}/fmt/{msg_format}".format(event=event, msg_format=msgFormat)
    return self._publishEvent(eventTopic, event, msgFormat, data, qos, on_publish)
python
{ "resource": "" }
q271523
Devices.update
test
def update(self, deviceUid, metadata=None, deviceInfo=None, status=None):
    """
    Update an existing device.

    Accepts either a DeviceUid instance or a plain dict describing one.
    Raises ApiException on any response other than 200 OK.
    """
    if not isinstance(deviceUid, DeviceUid) and isinstance(deviceUid, dict):
        deviceUid = DeviceUid(**deviceUid)

    url = "api/v0002/device/types/%s/devices/%s" % (deviceUid.typeId, deviceUid.deviceId)
    body = {"status": status, "deviceInfo": deviceInfo, "metadata": metadata}

    response = self._apiClient.put(url, body)
    if response.status_code != 200:
        raise ApiException(response)
    return Device(apiClient=self._apiClient, **response.json())
python
{ "resource": "" }
q271524
ConnectionStatus.find
test
def find(self, status=None, connectedAfter=None):
    """
    Iterate through all Connectors, optionally filtered by status
    and/or connection time.
    """
    filters = {}
    if status:
        filters["status"] = status
    if connectedAfter:
        filters["connectedAfter"] = connectedAfter
    return IterableClientStatusList(self._apiClient, filters=filters)
python
{ "resource": "" }
q271525
MgmtExtensions.list
test
def list(self):
    """
    List all device management extension packages.

    Raises ApiException on any response other than 200 OK.
    """
    response = self._apiClient.get("api/v0002/mgmt/custom/bundle")
    if response.status_code != 200:
        raise ApiException(response)
    return response.json()
python
{ "resource": "" }
q271526
MgmtExtensions.create
test
def create(self, dmeData):
    """
    Create a new device management extension package.

    In case of failure it throws APIException.
    """
    response = self._apiClient.post("api/v0002/mgmt/custom/bundle", dmeData)
    if response.status_code != 201:
        raise ApiException(response)
    return response.json()
python
{ "resource": "" }
q271527
updateSchema
test
def updateSchema(self, schemaId, schemaDefinition):
    """
    Update a schema. Throws APIException on failure.
    """
    url = ApiClient.oneSchemaUrl % (self.host, "/draft", schemaId)
    payload = {"schemaDefinition": schemaDefinition}
    resp = requests.put(url, auth=self.credentials,
                        headers={"Content-Type": "application/json"},
                        data=json.dumps(payload), verify=self.verify)
    if resp.status_code != 200:
        raise ibmiotf.APIException(resp.status_code, "HTTP error updating schema", resp)
    self.logger.debug("Schema updated")
    return resp.json()
python
{ "resource": "" }
q271528
AbstractClient.disconnect
test
def disconnect(self):
    """
    Disconnect the client from IBM Watson IoT Platform.
    """
    self.client.disconnect()
    # Without loop_stop() a zombie network thread keeps processing
    # traffic and blocks any later reconnect attempt via connect().
    self.client.loop_stop()
    self.logger.info("Closed connection to the IBM Watson IoT Platform")
python
{ "resource": "" }
q271529
AbstractClient._onConnect
test
def _onConnect(self, mqttc, userdata, flags, rc):
    """
    Called when the broker responds to our connection request.

    The value of rc determines success or not:
        0: Connection successful
        1: Connection refused - incorrect protocol version
        2: Connection refused - invalid client identifier
        3: Connection refused - server unavailable
        4: Connection refused - bad username or password
        5: Connection refused - not authorised
        6-255: Currently unused.
    """
    # rc == 0: connected -- signal waiters and re-establish any
    # subscriptions recorded before a reconnect.
    if rc == 0:
        self.connectEvent.set()
        self.logger.info("Connected successfully: %s" % (self.clientId))
        # Restoring previous subscriptions
        with self._subLock:
            if len(self._subscriptions) > 0:
                for subscription in self._subscriptions:
                    # We use the underlying mqttclient subscribe method rather than _subscribe because we are
                    # claiming a lock on the subscriptions list and do not want anything else to modify it,
                    # which that method does
                    (result, mid) = self.client.subscribe(subscription, qos=self._subscriptions[subscription])
                    if result != paho.MQTT_ERR_SUCCESS:
                        self._logAndRaiseException(ConnectionException("Unable to subscribe to %s" % subscription))
                self.logger.debug("Restored %s previous subscriptions" % len(self._subscriptions))
    # Non-zero rc values map to the CONNACK refusal reasons listed above;
    # each raises (and logs) a ConnectionException.
    elif rc == 1:
        self._logAndRaiseException(ConnectionException("Incorrect protocol version"))
    elif rc == 2:
        self._logAndRaiseException(ConnectionException("Invalid client identifier"))
    elif rc == 3:
        self._logAndRaiseException(ConnectionException("Server unavailable"))
    elif rc == 4:
        self._logAndRaiseException(
            ConnectionException("Bad username or password: (%s, %s)" % (self.username, self.password))
        )
    elif rc == 5:
        self._logAndRaiseException(
            ConnectionException("Not authorized: s (%s, %s, %s)" % (self.clientId, self.username, self.password))
        )
    else:
        self._logAndRaiseException(ConnectionException("Unexpected connection failure: %s" % (rc)))
python
{ "resource": "" }
q271530
ApplicationClient.subscribeToDeviceEvents
test
def subscribeToDeviceEvents(self, typeId="+", deviceId="+", eventId="+", msgFormat="+", qos=0):
    """
    Subscribe to device event messages.

    # Parameters
    typeId (string): typeId for the subscription, optional. Defaults to
        all device types (MQTT `+` wildcard)
    deviceId (string): deviceId for the subscription, optional. Defaults
        to all devices (MQTT `+` wildcard)
    eventId (string): eventId for the subscription, optional. Defaults
        to all events (MQTT `+` wildcard)
    msgFormat (string): msgFormat for the subscription, optional.
        Defaults to all formats (MQTT `+` wildcard)
    qos (int): MQTT quality of service level to use (`0`, `1`, or `2`)

    # Returns
    int: the Message ID (mid) of the subscribe request on success, which
    can be matched against the mid argument of a registered
    subscriptionCallback; `0` if the subscription is not possible.
    """
    # QuickStart orgs may not subscribe across all devices at once.
    if self._config.isQuickstart() and deviceId == "+":
        self.logger.warning(
            "QuickStart applications do not support wildcard subscription to events from all devices"
        )
        return 0

    eventTopic = "iot-2/type/{0}/id/{1}/evt/{2}/fmt/{3}".format(typeId, deviceId, eventId, msgFormat)
    return self._subscribe(eventTopic, qos)
python
{ "resource": "" }
q271531
ApplicationClient.subscribeToDeviceStatus
test
def subscribeToDeviceStatus(self, typeId="+", deviceId="+"):
    """
    Subscribe to device status messages.

    # Parameters
    typeId (string): typeId for the subscription, optional. Defaults to
        all device types (MQTT `+` wildcard)
    deviceId (string): deviceId for the subscription, optional. Defaults
        to all devices (MQTT `+` wildcard)

    # Returns
    int: the Message ID (mid) of the subscribe request on success, which
    can be matched against the mid argument of a registered
    subscriptionCallback; `0` if the subscription is not possible.
    """
    # QuickStart orgs may not subscribe across all devices at once.
    if self._config.isQuickstart() and deviceId == "+":
        self.logger.warning("QuickStart applications do not support wildcard subscription to device status")
        return 0

    statusTopic = "iot-2/type/{0}/id/{1}/mon".format(typeId, deviceId)
    return self._subscribe(statusTopic, 0)
python
{ "resource": "" }
q271532
ApplicationClient.subscribeToDeviceCommands
test
def subscribeToDeviceCommands(self, typeId="+", deviceId="+", commandId="+", msgFormat="+"):
    """
    Subscribe to device command messages.

    # Parameters
    typeId (string): typeId for the subscription, optional. Defaults to
        all device types (MQTT `+` wildcard)
    deviceId (string): deviceId for the subscription, optional. Defaults
        to all devices (MQTT `+` wildcard)
    commandId (string): commandId for the subscription, optional.
        Defaults to all commands (MQTT `+` wildcard)
    msgFormat (string): msgFormat for the subscription, optional.
        Defaults to all formats (MQTT `+` wildcard)

    # Returns
    int: the Message ID (mid) of the subscribe request on success, which
    can be matched against the mid argument of a registered
    subscriptionCallback; `0` if the subscription is not possible.
    """
    # QuickStart orgs have no command support at all.
    if self._config.isQuickstart():
        self.logger.warning("QuickStart applications do not support commands")
        return 0

    commandTopic = "iot-2/type/{0}/id/{1}/cmd/{2}/fmt/{3}".format(typeId, deviceId, commandId, msgFormat)
    return self._subscribe(commandTopic, 0)
python
{ "resource": "" }
q271533
ApplicationClient.publishCommand
test
def publishCommand(self, typeId, deviceId, commandId, msgFormat, data=None, qos=0, on_publish=None):
    """
    Publish a command to a device.

    # Parameters
    typeId (string) : The type of the device this command is to be published to
    deviceId (string): The id of the device this command is to be published to
    command (string) : The name of the command
    msgFormat (string) : The format of the command payload
    data (dict) : The command data
    qos (int) : The equivalent MQTT semantics of quality of service using the same constants (optional, defaults to `0`)
    on_publish (function) : A function that will be called when receipt of the publication is confirmed.
        This has different implications depending on the qos:
        - qos 0 : the client has asynchronously begun to send the event
        - qos 1 and 2 : the client has confirmation of delivery from WIoTP

    # Returns
    bool: True if the command was handed to the MQTT client, False if the
    org is QuickStart, the connection is not up within 10s, or the
    publish call failed.
    """
    # QuickStart orgs cannot send commands at all.
    if self._config.isQuickstart():
        self.logger.warning("QuickStart applications do not support sending commands")
        return False
    # Give the connection up to 10s to come up before giving up.
    if not self.connectEvent.wait(timeout=10):
        return False
    else:
        topic = "iot-2/type/%s/id/%s/cmd/%s/fmt/%s" % (typeId, deviceId, commandId, msgFormat)
        # Raise an exception if there is no codec for this msgFormat
        if self.getMessageCodec(msgFormat) is None:
            raise MissingMessageEncoderException(msgFormat)
        payload = self.getMessageCodec(msgFormat).encode(data, datetime.now())
        result = self.client.publish(topic, payload=payload, qos=qos, retain=False)
        if result[0] == paho.MQTT_ERR_SUCCESS:
            # Because we are dealing with aync pub/sub model and callbacks it is possible that
            # the _onPublish() callback for this mid is called before we obtain the lock to place
            # the mid into the _onPublishCallbacks list.
            #
            # _onPublish knows how to handle a scenario where the mid is not present (no nothing)
            # in this scenario we will need to invoke the callback directly here, because at the time
            # the callback was invoked the mid was not yet in the list.
            with self._messagesLock:
                if result[1] in self._onPublishCallbacks:
                    # paho callback beat this thread so call callback inline now
                    del self._onPublishCallbacks[result[1]]
                    if on_publish is not None:
                        on_publish()
                else:
                    # this thread beat paho callback so set up for call later
                    self._onPublishCallbacks[result[1]] = on_publish
            return True
        else:
            return False
python
{ "resource": "" }
q271534
ApplicationClient._onUnsupportedMessage
test
def _onUnsupportedMessage(self, client, userdata, message): """ Internal callback for messages that have not been handled by any of the specific internal callbacks, these messages are not passed on to any user provided callback """ self.logger.warning( "Received messaging on unsupported topic '%s' on topic '%s'" % (message.payload, message.topic) )
python
{ "resource": "" }
q271535
ApplicationClient._onDeviceEvent
test
def _onDeviceEvent(self, client, userdata, pahoMessage):
    """
    Internal callback for device event messages; parses the source
    device from the topic string and forwards the event to the
    registered device event callback.
    """
    # The whole body stays inside the try so that any
    # InvalidEventException raised during decoding is logged rather
    # than propagated into the paho network thread.
    try:
        ev = Event(pahoMessage, self._messageCodecs)
        self.logger.debug("Received event '%s' from %s:%s" % (ev.eventId, ev.typeId, ev.deviceId))
        if self.deviceEventCallback:
            self.deviceEventCallback(ev)
    except InvalidEventException as e:
        self.logger.critical(str(e))
python
{ "resource": "" }
q271536
ApplicationClient._onDeviceStatus
test
def _onDeviceStatus(self, client, userdata, pahoMessage):
    """
    Internal callback for device status messages; parses the source
    device from the topic string and forwards the status to the
    registered device status callback.
    """
    # Decoding failures are logged rather than propagated into the paho
    # network thread.
    try:
        deviceStatus = Status(pahoMessage)
        self.logger.debug("Received %s action from %s" % (deviceStatus.action, deviceStatus.clientId))
        if self.deviceStatusCallback:
            self.deviceStatusCallback(deviceStatus)
    except InvalidEventException as e:
        self.logger.critical(str(e))
python
{ "resource": "" }
q271537
ApplicationClient._onAppStatus
test
def _onAppStatus(self, client, userdata, pahoMessage):
    """
    Internal callback for application status messages; parses the source
    application from the topic string and forwards the status to the
    registered application status callback.
    """
    # Decoding failures are logged rather than propagated into the paho
    # network thread.
    try:
        appStatus = Status(pahoMessage)
        self.logger.debug("Received %s action from %s" % (appStatus.action, appStatus.clientId))
        if self.appStatusCallback:
            self.appStatusCallback(appStatus)
    except InvalidEventException as e:
        self.logger.critical(str(e))
python
{ "resource": "" }
q271538
LEC.get
test
def get(self, deviceUid, eventId):
    """
    Retrieves the last cached message for specified event from a
    specific device.

    Accepts either a DeviceUid instance or a plain dict describing one.
    Raises ApiException on any response other than 200 OK.
    """
    if not isinstance(deviceUid, DeviceUid) and isinstance(deviceUid, dict):
        deviceUid = DeviceUid(**deviceUid)

    url = "api/v0002/device/types/%s/devices/%s/events/%s" % (
        deviceUid.typeId, deviceUid.deviceId, eventId)
    response = self._apiClient.get(url)
    if response.status_code != 200:
        raise ApiException(response)
    return LastEvent(**response.json())
python
{ "resource": "" }
q271539
LEC.getAll
test
def getAll(self, deviceUid):
    """
    Retrieves a list of the last cached message for all events from a
    specific device.

    Accepts either a DeviceUid instance or a plain dict describing one.
    Raises ApiException on any response other than 200 OK.
    """
    if not isinstance(deviceUid, DeviceUid) and isinstance(deviceUid, dict):
        deviceUid = DeviceUid(**deviceUid)

    url = "api/v0002/device/types/%s/devices/%s/events" % (deviceUid.typeId, deviceUid.deviceId)
    response = self._apiClient.get(url)
    if response.status_code != 200:
        raise ApiException(response)
    return [LastEvent(**event) for event in response.json()]
python
{ "resource": "" }
q271540
IterableList._makeApiCall
test
def _makeApiCall(self, parameters=None): """ Retrieve bulk devices It accepts accepts a list of parameters In case of failure it throws Exception """ r = self._apiClient.get(self._url, parameters) if r.status_code == 200: return r.json() else: raise Exception("HTTP %s %s" % (r.status_code, r.text))
python
{ "resource": "" }
q271541
MgmtRequests.initiate
test
def initiate(self, request):
    """
    Initiates a device management request, such as reboot.

    In case of failure it throws APIException.
    """
    response = self._apiClient.post(MgmtRequests.mgmtRequests, request)
    if response.status_code != 202:
        raise ApiException(response)
    return response.json()
python
{ "resource": "" }
q271542
MgmtRequests.getStatus
test
def getStatus(self, requestId, typeId=None, deviceId=None):
    """
    Get device statuses for a device management request.

    With only requestId, returns the status list for all devices; with
    both typeId and deviceId, returns the status for that single device.
    Raises ApiException on any response other than 200 OK.
    """
    # Choose the endpoint, then issue a single shared request/response
    # round-trip (the original duplicated the get/check/raise per branch).
    if typeId is None or deviceId is None:
        url = MgmtRequests.mgmtRequestStatus % (requestId)
    else:
        url = MgmtRequests.mgmtRequestSingleDeviceStatus % (requestId, typeId, deviceId)

    response = self._apiClient.get(url)
    if response.status_code != 200:
        raise ApiException(response)
    return response.json()
python
{ "resource": "" }
q271543
Index.close
test
def close(self):
    """Force a flush of the index to storage. Renders index inaccessible."""
    if not self.handle:
        raise IOError("Unclosable index")
    # Destroy the native handle and forget it so later calls fail fast.
    self.handle.destroy()
    self.handle = None
python
{ "resource": "" }
q271544
Index.count
test
def count(self, coordinates):
    """Return number of objects that intersect the given coordinates.

    :param coordinates: sequence or array
        This may be an object that satisfies the numpy array protocol,
        providing the index's dimension * 2 coordinate pairs representing
        the `mink` and `maxk` coordinates in each dimension defining the
        bounds of the query window.

    Example::

        >>> from rtree import index
        >>> idx = index.Index()
        >>> idx.insert(4321,
        ...            (34.3776829412, 26.7375853734, 49.3776829412,
        ...             41.7375853734),
        ...            obj=42)
        >>> print(idx.count((0, 0, 60, 60)))
        1
    """
    p_mins, p_maxs = self.get_coordinate_pointers(coordinates)
    # Out-parameter for the native call; the library writes the hit count.
    n_hits = ctypes.c_uint64(0)
    core.rt.Index_Intersects_count(self.handle,
                                   p_mins,
                                   p_maxs,
                                   self.properties.dimension,
                                   ctypes.byref(n_hits))
    return n_hits.value
python
{ "resource": "" }
q271545
Index.nearest
test
def nearest(self, coordinates, num_results=1, objects=False):
    """Returns the ``k``-nearest objects to the given coordinates.

    :param coordinates: sequence or array
        This may be an object that satisfies the numpy array protocol,
        providing the index's dimension * 2 coordinate pairs representing
        the `mink` and `maxk` coordinates in each dimension defining the
        bounds of the query window.

    :param num_results: integer
        The number of results to return nearest to the given coordinates.
        If two index entries are equidistant, *both* are returned; this
        means more than :attr:`num_results` items may come back.

    :param objects: True / False / 'raw'
        If True, returns index objects that were pickled when stored,
        along with the id and bounds of the index entries. If 'raw',
        returns the stored object without the :class:`rtree.index.Item`
        wrapper.

    Example of finding the three items nearest to this one::

        >>> from rtree import index
        >>> idx = index.Index()
        >>> idx.insert(4321, (34.37, 26.73, 49.37, 41.73), obj=42)
        >>> hits = idx.nearest((0, 0, 10, 10), 3, objects=True)
    """
    # Object-returning queries go through the dedicated helper.
    if objects:
        return self._nearest_obj(coordinates, num_results, objects)

    p_mins, p_maxs = self.get_coordinate_pointers(coordinates)
    # In/out parameter: in = requested count, out = count actually found.
    p_num_results = ctypes.pointer(ctypes.c_uint64(num_results))
    id_buffer = ctypes.pointer(ctypes.c_int64())
    core.rt.Index_NearestNeighbors_id(self.handle,
                                      p_mins,
                                      p_maxs,
                                      self.properties.dimension,
                                      ctypes.byref(id_buffer),
                                      p_num_results)
    return self._get_ids(id_buffer, p_num_results.contents.value)
python
{ "resource": "" }
q271546
Index.get_bounds
test
def get_bounds(self, coordinate_interleaved=None):
    """Returns the bounds of the index.

    :param coordinate_interleaved: If True, the coordinates are returned
        in the form [xmin, ymin, ..., kmin, xmax, ymax, ..., kmax];
        otherwise as [xmin, xmax, ymin, ymax, ..., ..., kmin, kmax]. If
        not specified, the :attr:`interleaved` member of the index is
        used, which defaults to True.
    """
    # Fall back to the index-wide interleaving setting when unspecified.
    interleaved = (self.interleaved if coordinate_interleaved is None
                   else coordinate_interleaved)
    return _get_bounds(self.handle, core.rt.Index_GetBounds, interleaved)
python
{ "resource": "" }
q271547
Index.delete
test
def delete(self, id, coordinates):
    """Deletes items from the index with the given ``'id'`` within the
    specified coordinates.

    :param id: long integer
        A long integer that is the identifier for this index entry. IDs
        need not be unique to be inserted into the index, and it is up
        to the user to ensure they are unique if this is a requirement.

    :param coordinates: sequence or array
        Dimension * 2 coordinate pairs, representing the min and max
        coordinates in each dimension of the item to be deleted from the
        index. Their ordering depends on the index's :attr:`interleaved`
        data member. These are the coordinates of the item itself, not
        of a space containing it; together with the id parameter they
        determine which item will be deleted. This may be an object that
        satisfies the numpy array protocol.

    Example::

        >>> from rtree import index
        >>> idx = index.Index()
        >>> idx.delete(4321,
        ...            (34.3776829412, 26.7375853734, 49.3776829412,
        ...             41.7375853734))
    """
    p_mins, p_maxs = self.get_coordinate_pointers(coordinates)
    core.rt.Index_DeleteData(self.handle,
                             id,
                             p_mins,
                             p_maxs,
                             self.properties.dimension)
python
{ "resource": "" }
q271548
Index._create_idx_from_stream
test
def _create_idx_from_stream(self, stream):
    """This function is used to instantiate the index given an iterable
    stream of data."""
    stream_iter = iter(stream)
    dimension = self.properties.dimension
    darray = ctypes.c_double * dimension
    # Reusable coordinate buffers; the callback below refills them for
    # every entry and hands out pointers into them.
    mins = darray()
    maxs = darray()
    # Sentinel pointer used for entries that carry no serialized object.
    no_data = ctypes.cast(ctypes.pointer(ctypes.c_ubyte(0)),
                          ctypes.POINTER(ctypes.c_ubyte))

    def py_next_item(p_id, p_mins, p_maxs, p_dimension, p_data, p_length):
        """This function must fill pointers to individual entries that will
        be added to the index. The C API will actually call this function
        to fill out the pointers. If this function returns anything other
        than 0, it is assumed that the stream of data is done."""
        try:
            p_id[0], coordinates, obj = next(stream_iter)
        except StopIteration:
            # we're done
            return -1
        except Exception as exc:
            # Stash the error so it can be surfaced outside the C
            # callback; returning -1 stops the stream.
            self._exception = exc
            return -1

        if self.interleaved:
            coordinates = Index.deinterleave(coordinates)

        # this code assumes the coords are not interleaved.
        # xmin, xmax, ymin, ymax, zmin, zmax
        for i in range(dimension):
            mins[i] = coordinates[i*2]
            maxs[i] = coordinates[(i*2)+1]

        p_mins[0] = ctypes.cast(mins, ctypes.POINTER(ctypes.c_double))
        p_maxs[0] = ctypes.cast(maxs, ctypes.POINTER(ctypes.c_double))

        # set the dimension
        p_dimension[0] = dimension
        if obj is None:
            p_data[0] = no_data
            p_length[0] = 0
        else:
            p_length[0], data, _ = self._serialize(obj)
            p_data[0] = ctypes.cast(data, ctypes.POINTER(ctypes.c_ubyte))

        return 0

    stream = core.NEXTFUNC(py_next_item)
    return IndexStreamHandle(self.properties.handle, stream)
python
{ "resource": "" }
q271549
CustomStorage.loadByteArray
test
def loadByteArray(self, page, returnError):
    """Must be overridden. Must return a string with the loaded data.

    Sets `returnError` to IllegalStateError and raises
    NotImplementedError when not overridden by a subclass.
    """
    returnError.contents.value = self.IllegalStateError
    raise NotImplementedError("You must override this method.")
    # NOTE: the original ended with an unreachable `return ''` after the
    # raise; it has been removed as dead code.
python
{ "resource": "" }
q271550
RtreeContainer.delete
test
def delete(self, obj, coordinates):
    """Deletes the item from the container within the specified
    coordinates.

    :param obj: object
        Any object.
    :param coordinates: sequence or array
        Dimension * 2 coordinate pairs, representing the min
        and max coordinates in each dimension of the item to be
        deleted from the index. Their ordering will depend on the
        index's :attr:`interleaved` data member.
        These are not the coordinates of a space containing the
        item, but those of the item itself. Together with the
        id parameter, they determine which item will be deleted.
        This may be an object that satisfies the numpy array protocol.

    Example::

        >>> from rtree import index
        >>> idx = index.RtreeContainer()
        >>> idx.delete(object(),
        ...            (34.3776829412, 26.7375853734, 49.3776829412,
        ...             41.7375853734))
        Traceback (most recent call last):
         ...
        IndexError: object is not in the index
    """
    # The registry maps id(obj) -> (reference count, obj); take the
    # count out of the tuple before decrementing.
    try:
        count = self._objects[id(obj)][0] - 1
    except KeyError:
        raise IndexError('object is not in the index')
    if count == 0:
        # Last reference: drop the registry entry (keyed by id(obj),
        # not by the object itself).
        del self._objects[id(obj)]
    else:
        self._objects[id(obj)] = (count, obj)
    # Delete from the underlying index using the object's id as the
    # entry identifier (the original passed the builtin `id` function).
    return super(RtreeContainer, self).delete(id(obj), coordinates)
python
{ "resource": "" }
q271551
check_return
test
def check_return(result, func, cargs):
    """Error checking for Error calls.

    Raises RTreeError (with the library's last error message) when
    `func` returned a nonzero status; returns True otherwise.
    """
    if result == 0:
        return True
    last_error = rt.Error_GetLastErrorMsg().decode()
    message = 'LASError in "%s": %s' % (func.__name__, last_error)
    rt.Error_Reset()
    raise RTreeError(message)
python
{ "resource": "" }
q271552
WSGIApp.load
test
def load(self):
    """Import and return the WSGI application.

    Strings are resolved via ``util.import_app``; anything else is
    assumed to already be an application object and returned as-is.
    """
    app = self.application
    return util.import_app(app) if isinstance(app, str) else app
python
{ "resource": "" }
q271553
Common.init_app
test
def init_app(self, app):
    """Initializes the Flask application with Common.

    Registers the extension on `app`, wires up WhiteNoise static file
    serving (unless COMMON_FILESERVER_DISABLED is configured), a cache,
    request-timing headers and a favicon redirect.
    """
    if not hasattr(app, 'extensions'):
        app.extensions = {}

    if 'common' in app.extensions:
        raise RuntimeError("Flask-Common extension already initialized")

    app.extensions['common'] = self
    self.app = app

    if 'COMMON_FILESERVER_DISABLED' not in app.config:
        # url_for needs an active request context at setup time.
        with app.test_request_context():
            # Configure WhiteNoise.
            app.wsgi_app = WhiteNoise(app.wsgi_app, root=url_for('static', filename='')[1:])

    self.cache = Cache(app, config={'CACHE_TYPE': app.config.get("COMMON_CACHE_TYPE", 'simple')})

    @app.before_request
    def before_request_callback():
        # Stamp the request so X-Processed-Time can be computed later.
        request.start_time = maya.now()

    @app.after_request
    def after_request_callback(response):
        if 'COMMON_POWERED_BY_DISABLED' not in current_app.config:
            response.headers['X-Powered-By'] = 'Flask'
        if 'COMMON_PROCESSED_TIME_DISABLED' not in current_app.config:
            response.headers['X-Processed-Time'] = maya.now().epoch - request.start_time.epoch
        return response

    @app.route('/favicon.ico')
    def favicon():
        # Permanent redirect to the static favicon.
        return redirect(url_for('static', filename='favicon.ico'), code=301)
python
{ "resource": "" }
q271554
Common.serve
test
def serve(self, workers=None, **kwargs):
    """Serves the Flask application.

    Uses the Flask development server in debug mode; otherwise boots
    Gunicorn with the meinheld worker class.
    """
    if self.app.debug:
        print(crayons.yellow('Booting Flask development server...'))
        self.app.run()
        return

    print(crayons.yellow('Booting Gunicorn...'))
    worker_count = workers or number_of_gunicorn_workers()
    # Start the web server.
    GunicornServer(
        self.app,
        workers=worker_count,
        worker_class='egg:meinheld#gunicorn_worker',
        **kwargs
    ).run()
python
{ "resource": "" }
q271555
VersatileImageFieldSerializer.to_native
test
def to_native(self, value):
    """For djangorestframework <=2.3.14"""
    request = self.context.get('request', None) if self.context else None
    return build_versatileimagefield_url_set(
        value,
        self.sizes,
        request=request
    )
python
{ "resource": "" }
q271556
CroppedImage.crop_on_centerpoint
test
def crop_on_centerpoint(self, image, width, height, ppoi=(0.5, 0.5)):
    """
    Return a PIL Image instance cropped from `image`.

    Image has an aspect ratio provided by dividing `width` / `height`),
    sized down to `width`x`height`.

    Any 'excess pixels' are trimmed away in respect to the pixel of
    `image` that corresponds to `ppoi` (Primary Point of Interest).

    `image`: A PIL Image instance
    `width`: Integer, width of the image to return (in pixels)
    `height`: Integer, height of the image to return (in pixels)
    `ppoi`: A 2-tuple of floats with values greater than 0
            and less than 1 These values are converted into a cartesian
            coordinate that signifies the 'center pixel' which the crop
            will center on (to trim the excess from the 'long side').

    Determines whether to trim away pixels from either the left/right
    or top/bottom sides by comparing the aspect ratio of `image`
    vs the aspect ratio of `width`x`height`.

    Will trim from the left/right sides if the aspect ratio of `image`
    is greater-than-or-equal-to the aspect ratio of `width`x`height`.

    Will trim from the top/bottom sides if the aspect ration of `image`
    is less-than the aspect ratio or `width`x`height`.

    Similar to Kevin Cazabon's ImageOps.fit method but uses the ppoi
    value as an absolute centerpoint (as opposed as a percentage to
    trim off the 'long sides').
    """
    # Convert the fractional PPOI into an absolute pixel coordinate.
    ppoi_x_axis = int(image.size[0] * ppoi[0])
    ppoi_y_axis = int(image.size[1] * ppoi[1])
    center_pixel_coord = (ppoi_x_axis, ppoi_y_axis)
    # Calculate the aspect ratio of `image`
    orig_aspect_ratio = float(
        image.size[0]
    ) / float(
        image.size[1]
    )
    crop_aspect_ratio = float(width) / float(height)

    # Figure out if we're trimming from the left/right or top/bottom
    if orig_aspect_ratio >= crop_aspect_ratio:
        # `image` is wider than what's needed,
        # crop from left/right sides
        orig_crop_width = int(
            (crop_aspect_ratio * float(image.size[1])) + 0.5
        )
        orig_crop_height = image.size[1]
        crop_boundary_top = 0
        crop_boundary_bottom = orig_crop_height
        crop_boundary_left = center_pixel_coord[0] - (orig_crop_width // 2)
        crop_boundary_right = crop_boundary_left + orig_crop_width
        # Clamp the crop window back inside the image when the PPOI
        # sits too close to an edge.
        if crop_boundary_left < 0:
            crop_boundary_left = 0
            crop_boundary_right = crop_boundary_left + orig_crop_width
        elif crop_boundary_right > image.size[0]:
            crop_boundary_right = image.size[0]
            crop_boundary_left = image.size[0] - orig_crop_width
    else:
        # `image` is taller than what's needed,
        # crop from top/bottom sides
        orig_crop_width = image.size[0]
        orig_crop_height = int(
            (float(image.size[0]) / crop_aspect_ratio) + 0.5
        )
        crop_boundary_left = 0
        crop_boundary_right = orig_crop_width
        crop_boundary_top = center_pixel_coord[1] - (orig_crop_height // 2)
        crop_boundary_bottom = crop_boundary_top + orig_crop_height
        # Clamp vertically, mirroring the horizontal case above.
        if crop_boundary_top < 0:
            crop_boundary_top = 0
            crop_boundary_bottom = crop_boundary_top + orig_crop_height
        elif crop_boundary_bottom > image.size[1]:
            crop_boundary_bottom = image.size[1]
            crop_boundary_top = image.size[1] - orig_crop_height
    # Cropping the image from the original image
    cropped_image = image.crop(
        (
            crop_boundary_left,
            crop_boundary_top,
            crop_boundary_right,
            crop_boundary_bottom
        )
    )
    # Resizing the newly cropped image to the size specified
    # (as determined by `width`x`height`)
    return cropped_image.resize(
        (width, height),
        Image.ANTIALIAS
    )
python
{ "resource": "" }
q271557
CroppedImage.process_image
test
def process_image(self, image, image_format, save_kwargs, width, height):
    """
    Return a BytesIO instance of `image` cropped to `width` x `height`.

    Cropping first reduces the image along its longest side and then
    crops inwards, centered on the Primary Point of Interest
    (as specified by `self.ppoi`).
    """
    buffer = BytesIO()
    original_palette = image.getpalette()
    cropped = self.crop_on_centerpoint(image, width, height, self.ppoi)

    # Using ImageOps.fit on GIFs can introduce issues with their palette
    # Solution derived from: http://stackoverflow.com/a/4905209/1149774
    if image_format == 'GIF':
        cropped.putpalette(original_palette)

    cropped.save(buffer, **save_kwargs)
    return buffer
python
{ "resource": "" }
q271558
ThumbnailImage.process_image
test
def process_image(self, image, image_format, save_kwargs, width, height):
    """
    Return a BytesIO instance of `image` that fits within a
    `width` x `height` bounding box.
    """
    buffer = BytesIO()
    image.thumbnail((width, height), Image.ANTIALIAS)
    image.save(buffer, **save_kwargs)
    return buffer
python
{ "resource": "" }
q271559
InvertImage.process_image
test
def process_image(self, image, image_format, save_kwargs=None):
    """Return a BytesIO instance of `image` with inverted colors.

    `save_kwargs` is an optional dict of keyword arguments forwarded to
    PIL's ``Image.save``.
    """
    # Avoid the shared-mutable-default pitfall: the original signature
    # used `save_kwargs={}`, a single dict shared across all calls.
    if save_kwargs is None:
        save_kwargs = {}
    imagefile = BytesIO()
    inv_image = ImageOps.invert(image)
    inv_image.save(imagefile, **save_kwargs)
    return imagefile
python
{ "resource": "" }
q271560
VersatileImageFormField.to_python
test
def to_python(self, data):
    """Ensure data is prepped properly before handing off to ImageField."""
    if data is not None and hasattr(data, 'open'):
        # Re-open the file so validation can read it from the start.
        data.open()
    return super(VersatileImageFormField, self).to_python(data)
python
{ "resource": "" }
q271561
VersatileImageField.process_placeholder_image
test
def process_placeholder_image(self):
    """
    Process the field's placeholder image.

    Ensures the placeholder image has been saved to the same storage class
    as the field in a top level folder with a name specified by
    settings.VERSATILEIMAGEFIELD_SETTINGS['placeholder_directory_name']

    This should be called by the VersatileImageFileDescriptor __get__.
    If self.placeholder_image_name is already set it just returns right away.
    """
    if self.placeholder_image_name:
        # Already processed on a previous access.
        return
    placeholder_image_name = None
    placeholder_image = self.placeholder_image
    if placeholder_image:
        # OnStoragePlaceholderImage already knows its path; other
        # placeholder types derive the name from their image data.
        if isinstance(placeholder_image, OnStoragePlaceholderImage):
            name = placeholder_image.path
        else:
            name = placeholder_image.image_data.name
        placeholder_image_name = os.path.join(
            VERSATILEIMAGEFIELD_PLACEHOLDER_DIRNAME, name
        )
        # Copy the placeholder onto this field's storage only once.
        if not self.storage.exists(placeholder_image_name):
            self.storage.save(
                placeholder_image_name,
                placeholder_image.image_data
            )
    self.placeholder_image_name = placeholder_image_name
python
{ "resource": "" }
q271562
VersatileImageField.pre_save
test
def pre_save(self, model_instance, add):
    """Return field's value just before saving.

    Also refreshes the model's companion PPOI field.
    """
    current_file = super(VersatileImageField, self).pre_save(
        model_instance, add
    )
    self.update_ppoi_field(model_instance)
    return current_file
python
{ "resource": "" }
q271563
VersatileImageField.update_ppoi_field
test
def update_ppoi_field(self, instance, *args, **kwargs):
    """
    Update the model's ppoi field, if one is configured.

    Hooked into this field's pre_save so the ppoi value on `instance`
    is refreshed immediately before the model instance is saved.
    """
    # Nothing to update when no companion ppoi field is configured.
    if not self.ppoi_field:
        return

    # getattr triggers VersatileImageFileDescriptor.__get__, which
    # coerces the stored value into a VersatileImageFieldFile (or None).
    field_file = getattr(instance, self.attname)

    ppoi = None
    if (
        field_file
        and not isinstance(field_file, tuple)
        and hasattr(field_file, 'ppoi')
    ):
        ppoi = field_file.ppoi

    setattr(instance, self.ppoi_field, ppoi)
python
{ "resource": "" }
q271564
VersatileImageField.save_form_data
test
def save_form_data(self, instance, data):
    """
    Handle data sent from MultiValueField forms that set ppoi values.

    `instance`: The model instance that is being altered via a form
    `data`: The data sent from the form to this field which can be either:
    * `None`: This is unset data from an optional field
    * A two-position tuple: (image_form_data, ppoi_data)
        * `image_form-data` options:
            * `None` the file for this field is unchanged
            * `False` unassign the file form the field
        * `ppoi_data` data structure:
            * `%(x_coordinate)sx%(y_coordinate)s': The ppoi data to
              assign to the unchanged file
    """
    to_assign = data
    if data and isinstance(data, tuple):
        # This value is coming from a MultiValueField
        if data[0] is None:
            # This means the file hasn't changed but we need to
            # update the ppoi
            current_field = getattr(instance, self.name)
            if data[1]:
                current_field.ppoi = data[1]
            to_assign = current_field
        elif data[0] is False:
            # This means the 'Clear' checkbox was checked so we
            # need to empty the field
            to_assign = ''
        else:
            # This means there is a new upload so we need to unpack
            # the tuple and assign the first position to the field
            # attribute
            to_assign = data[0]
    # Non-tuple data (including None) falls through unchanged.
    super(VersatileImageField, self).save_form_data(instance, to_assign)
python
{ "resource": "" }
q271565
VersatileImageField.formfield
test
def formfield(self, **kwargs): """Return a formfield.""" # This is a fairly standard way to set up some defaults # while letting the caller override them. defaults = {} if self.ppoi_field: defaults['form_class'] = SizedImageCenterpointClickDjangoAdminField if kwargs.get('widget') is AdminFileWidget: # Ensuring default admin widget is skipped (in favor of using # SizedImageCenterpointClickDjangoAdminField's default widget as # the default widget choice for use in the admin). # This is for two reasons: # 1. To prevent 'typical' admin users (those who want to use # the PPOI 'click' widget by default) from having to # specify a formfield_overrides for each ModelAdmin class # used by each model that has a VersatileImageField. # 2. If a VersatileImageField does not have a ppoi_field specified # it will 'fall back' to a ClearableFileInput anyways. # If admin users do, in fact, want to force use of the # AdminFileWidget they can simply subclass AdminFileWidget and # specify it in their ModelAdmin.formfield_overrides (though, # if that's the case, why are they using VersatileImageField in # the first place?) del kwargs['widget'] defaults.update(kwargs) return super(VersatileImageField, self).formfield(**defaults)
python
{ "resource": "" }
q271566
PPOIField.value_to_string
test
def value_to_string(self, obj):
    """Prepare field for serialization.

    Django < 1.10 exposed the value getter as `_get_val_from_obj`.
    """
    getter = (
        self.value_from_object
        if DJANGO_VERSION > (1, 9)
        else self._get_val_from_obj
    )
    return self.get_prep_value(getter(obj))
python
{ "resource": "" }
q271567
autodiscover
test
def autodiscover():
    """
    Discover versatileimagefield.py modules.

    Iterate over django.apps.get_app_configs() and discover
    versatileimagefield.py modules.
    """
    from importlib import import_module
    from django.apps import apps
    from django.utils.module_loading import module_has_submodule

    for app_config in apps.get_app_configs():
        # Attempt to import the app's module.
        try:
            # Snapshot both registries so a failed import can be
            # rolled back cleanly below.
            before_import_sizedimage_registry = copy.copy(
                versatileimagefield_registry._sizedimage_registry
            )
            before_import_filter_registry = copy.copy(
                versatileimagefield_registry._filter_registry
            )
            import_module('%s.versatileimagefield' % app_config.name)
        except Exception:
            # Reset the versatileimagefield_registry to the state before the
            # last import as this import will have to reoccur on the next
            # request and this could raise NotRegistered and AlreadyRegistered
            # exceptions (see django ticket #8245).
            versatileimagefield_registry._sizedimage_registry = \
                before_import_sizedimage_registry
            versatileimagefield_registry._filter_registry = \
                before_import_filter_registry

            # Decide whether to bubble up this error. If the app just
            # doesn't have the module in question, we can ignore the error
            # attempting to import it, otherwise we want it to bubble up.
            if module_has_submodule(app_config.module, 'versatileimagefield'):
                raise
python
{ "resource": "" }
q271568
VersatileImageFieldRegistry.unregister_sizer
test
def unregister_sizer(self, attr_name):
    """
    Unregister the SizedImage subclass currently assigned to `attr_name`.

    If a SizedImage subclass isn't already registered to `attr_name`
    NotRegistered will raise.
    """
    try:
        del self._sizedimage_registry[attr_name]
    except KeyError:
        raise NotRegistered(
            'No SizedImage subclass is registered to %s' % attr_name
        )
python
{ "resource": "" }
q271569
VersatileImageFieldRegistry.unregister_filter
test
def unregister_filter(self, attr_name):
    """
    Unregister the FilteredImage subclass currently assigned to attr_name.

    If a FilteredImage subclass isn't already registered to `attr_name`
    NotRegistered will raise.
    """
    try:
        del self._filter_registry[attr_name]
    except KeyError:
        raise NotRegistered(
            'No FilteredImage subclass is registered to %s' % attr_name
        )
python
{ "resource": "" }
q271570
VersatileImageMixIn.url
test
def url(self):
    """
    Return the appropriate URL.

    * Empty field (`self.name` falsy) with a configured placeholder:
      returns the placeholder's URL.
    * Otherwise: vanilla ImageFieldFile behavior.
    """
    if self.name or not self.field.placeholder_image_name:
        return super(VersatileImageMixIn, self).url
    return self.storage.url(self.field.placeholder_image_name)
python
{ "resource": "" }
q271571
VersatileImageMixIn.build_filters_and_sizers
test
def build_filters_and_sizers(self, ppoi_value, create_on_demand):
    """Build the filters and sizers for a field."""
    name = self.name
    # Empty fields fall back to the configured placeholder image so
    # filters/sizers still resolve to something renderable.
    if not name and self.field.placeholder_image_name:
        name = self.field.placeholder_image_name
    self.filters = FilterLibrary(
        name,
        self.storage,
        versatileimagefield_registry,
        ppoi_value,
        create_on_demand
    )
    # Attach one sizer attribute per registered SizedImage subclass
    # (e.g. `crop`, `thumbnail`).
    for (
        attr_name,
        sizedimage_cls
    ) in iteritems(versatileimagefield_registry._sizedimage_registry):
        setattr(
            self,
            attr_name,
            sizedimage_cls(
                path_to_image=name,
                storage=self.storage,
                create_on_demand=create_on_demand,
                ppoi=ppoi_value
            )
        )
python
{ "resource": "" }
q271572
VersatileImageMixIn.get_filtered_root_folder
test
def get_filtered_root_folder(self):
    """Return the location where filtered images are stored."""
    containing_folder = os.path.split(self.name)[0]
    # Trailing '' yields a path that ends with a separator.
    return os.path.join(
        containing_folder, VERSATILEIMAGEFIELD_FILTERED_DIRNAME, ''
    )
python
{ "resource": "" }
q271573
VersatileImageMixIn.get_sized_root_folder
test
def get_sized_root_folder(self):
    """Return the location where sized images are stored."""
    containing_folder = os.path.split(self.name)[0]
    # Trailing '' yields a path that ends with a separator.
    return os.path.join(
        VERSATILEIMAGEFIELD_SIZED_DIRNAME, containing_folder, ''
    )
python
{ "resource": "" }
q271574
VersatileImageMixIn.get_filtered_sized_root_folder
test
def get_filtered_sized_root_folder(self):
    """Return the location where filtered + sized images are stored."""
    return os.path.join(
        self.get_sized_root_folder(),
        VERSATILEIMAGEFIELD_FILTERED_DIRNAME
    )
python
{ "resource": "" }
q271575
VersatileImageMixIn.delete_matching_files_from_storage
test
def delete_matching_files_from_storage(self, root_folder, regex):
    """
    Delete files in `root_folder` which match `regex` before file ext.

    Example values:
        * root_folder = 'foo/'
        * self.name = 'bar.jpg'
        * regex = re.compile('-baz')

    Result:
        * foo/bar-baz.jpg <- Deleted
        * foo/bar-biz.jpg <- Not deleted
    """
    if not self.name:  # pragma: no cover
        return
    try:
        directory_list, file_list = self.storage.listdir(root_folder)
    except OSError:  # pragma: no cover
        # Nothing to delete if the folder doesn't exist on storage.
        pass
    else:
        folder, filename = os.path.split(self.name)
        basename, ext = os.path.splitext(filename)
        for f in file_list:
            # Only consider renditions of this exact file:
            # <basename><tag><ext>, where <tag> is matched below.
            if not f.startswith(basename) or not f.endswith(ext):  # pragma: no cover
                continue
            tag = f[len(basename):-len(ext)]
            assert f == basename + tag + ext
            if regex.match(tag) is not None:
                file_location = os.path.join(root_folder, f)
                self.storage.delete(file_location)
                # Invalidate the cached URL of the deleted rendition.
                cache.delete(
                    self.storage.url(file_location)
                )
                print(
                    "Deleted {file} (created from: {original})".format(
                        file=os.path.join(root_folder, f),
                        original=self.name
                    )
                )
python
{ "resource": "" }
q271576
ProcessedImage.preprocess
test
def preprocess(self, image, image_format):
    """
    Preprocess an image.

    An API hook for image pre-processing. Calls any image format specific
    pre-processors (if defined). I.E. If `image_format` is 'JPEG', this
    method will look for a method named `preprocess_JPEG`, if found
    `image` will be passed to it.

    Arguments:
        * `image`: a PIL Image instance
        * `image_format`: str, a valid PIL format (i.e. 'JPEG' or 'GIF')

    Subclasses should return a 2-tuple:
        * [0]: A PIL Image instance.
        * [1]: A dictionary of additional keyword arguments to be used
               when the instance is saved. If no additional keyword
               arguments, return an empty dict ({}).
    """
    save_kwargs = {'format': image_format}

    # Ensuring image is properly rotated
    if hasattr(image, '_getexif'):
        exif_datadict = image._getexif()  # returns None if no EXIF data
        if exif_datadict is not None:
            exif = dict(exif_datadict.items())
            orientation = exif.get(EXIF_ORIENTATION_KEY, None)
            # Transpose the pixel data according to the EXIF
            # Orientation tag so the saved rendition is upright.
            if orientation == 3:
                image = image.transpose(Image.ROTATE_180)
            elif orientation == 6:
                image = image.transpose(Image.ROTATE_270)
            elif orientation == 8:
                image = image.transpose(Image.ROTATE_90)

    # Ensure any embedded ICC profile is preserved
    save_kwargs['icc_profile'] = image.info.get('icc_profile')

    # Dispatch to a format-specific hook (e.g. preprocess_JPEG) and
    # merge any extra save kwargs it returns.
    if hasattr(self, 'preprocess_%s' % image_format):
        image, addl_save_kwargs = getattr(
            self,
            'preprocess_%s' % image_format
        )(image=image)
        save_kwargs.update(addl_save_kwargs)

    return image, save_kwargs
python
{ "resource": "" }
q271577
ProcessedImage.preprocess_GIF
test
def preprocess_GIF(self, image, **kwargs):
    """
    Receive a PIL Image instance of a GIF and return 2-tuple.

    Args:
        * [0]: Original Image instance (passed to `image`)
        * [1]: Dict with a transparency key (to GIF transparency layer)
    """
    save_kwargs = {}
    if 'transparency' in image.info:
        # Carry the GIF's transparency layer through to save().
        save_kwargs['transparency'] = image.info['transparency']
    return (image, save_kwargs)
python
{ "resource": "" }
q271578
ProcessedImage.preprocess_JPEG
test
def preprocess_JPEG(self, image, **kwargs):
    """
    Receive a PIL Image instance of a JPEG and returns 2-tuple.

    Args:
        * [0]: Image instance, converted to RGB
        * [1]: Dict with a quality key (mapped to the value of `QUAL` as
               defined by the `VERSATILEIMAGEFIELD_JPEG_RESIZE_QUALITY`
               setting)
    """
    # JPEG cannot store alpha/palette modes; normalize to RGB.
    if image.mode != 'RGB':
        image = image.convert('RGB')
    return (
        image,
        {
            'progressive': VERSATILEIMAGEFIELD_PROGRESSIVE_JPEG,
            'quality': QUAL,
        },
    )
python
{ "resource": "" }
q271579
ProcessedImage.retrieve_image
test
def retrieve_image(self, path_to_image):
    """Return a 4-tuple for the image stored at `path_to_image`:
    (PIL Image instance, file extension, image format, mime type)."""
    fileobj = self.storage.open(path_to_image, 'rb')
    file_ext = path_to_image.rsplit('.')[-1]
    image_format, mime_type = get_image_metadata_from_file_ext(file_ext)
    return (Image.open(fileobj), file_ext, image_format, mime_type)
python
{ "resource": "" }
q271580
ProcessedImage.save_image
test
def save_image(self, imagefile, save_path, file_ext, mime_type):
    """
    Save an image to self.storage at `save_path`.

    Arguments:
        `imagefile`: Raw image data, typically a BytesIO instance.
        `save_path`: The path within self.storage where the image should
                     be saved.
        `file_ext`: The file extension of the image-to-be-saved.
        `mime_type`: A valid image mime type (as found in
                     versatileimagefield.utils)
    """
    upload = InMemoryUploadedFile(
        imagefile,
        None,
        'foo.%s' % file_ext,
        mime_type,
        imagefile.tell(),
        None
    )
    # Rewind so storage reads the full buffer from the start.
    upload.seek(0)
    self.storage.save(save_path, upload)
python
{ "resource": "" }
q271581
SizedImage.ppoi_as_str
test
def ppoi_as_str(self):
    """Return the PPOI as a filename-safe string ('.' becomes '-')."""
    return '{}__{}'.format(
        str(self.ppoi[0]).replace('.', '-'),
        str(self.ppoi[1]).replace('.', '-')
    )
python
{ "resource": "" }
q271582
SizedImage.create_resized_image
test
def create_resized_image(self, path_to_image, save_path_on_storage, width, height):
    """
    Create a resized image.

    `path_to_image`: The path to the image with the media directory to
                     resize. If `None`, the
                     VERSATILEIMAGEFIELD_PLACEHOLDER_IMAGE will be used.
    `save_path_on_storage`: Where on self.storage to save the resized
                            image
    `width`: Width of resized image (int)
    `height`: Desired height of resized image (int)

    Pipeline: retrieve the source image from storage, run the
    format-aware preprocess hook, size it via `process_image`, then
    persist the result back to storage.
    """
    image, file_ext, image_format, mime_type = self.retrieve_image(
        path_to_image
    )
    image, save_kwargs = self.preprocess(image, image_format)
    imagefile = self.process_image(
        image=image,
        image_format=image_format,
        save_kwargs=save_kwargs,
        width=width,
        height=height
    )
    self.save_image(imagefile, save_path_on_storage, file_ext, mime_type)
python
{ "resource": "" }
q271583
ClearableFileInputWithImagePreview.render
test
def render(self, name, value, attrs=None, renderer=None):
    """
    Render the widget as an HTML string.

    Overridden here to support Django < 1.11.
    """
    if not self.has_template_widget_rendering:
        # Pre-1.11 Django: render our template manually.
        context = self.get_context(name, value, attrs)
        return render_to_string(self.template_name, context)
    return super(ClearableFileInputWithImagePreview, self).render(
        name, value, attrs=attrs, renderer=renderer
    )
python
{ "resource": "" }
q271584
ClearableFileInputWithImagePreview.get_context
test
def get_context(self, name, value, attrs):
    """Get the context to render this widget with.

    Builds the context by hand on Django versions without template-based
    widget rendering, then augments it with clear-checkbox and PPOI
    preview data.
    """
    if self.has_template_widget_rendering:
        context = super(ClearableFileInputWithImagePreview,
                        self).get_context(name, value, attrs)
    else:
        # Build the context manually.
        context = {}
        context['widget'] = {
            'name': name,
            'is_hidden': self.is_hidden,
            'required': self.is_required,
            'value': self._format_value(value),
            'attrs': self.build_attrs(self.attrs, attrs),
            'template_name': self.template_name,
            'type': self.input_type,
        }

    # It seems Django 1.11's ClearableFileInput doesn't add everything to
    # the 'widget' key, so we can't use it in MultiWidget. Add it
    # manually here.
    checkbox_name = self.clear_checkbox_name(name)
    checkbox_id = self.clear_checkbox_id(checkbox_name)
    context['widget'].update({
        'checkbox_name': checkbox_name,
        'checkbox_id': checkbox_id,
        'is_initial': self.is_initial(value),
        'input_text': self.input_text,
        'initial_text': self.initial_text,
        'clear_checkbox_label': self.clear_checkbox_label,
    })

    # Only files with a resolvable URL get the PPOI click preview.
    if value and hasattr(value, "url"):
        context['widget'].update({
            'hidden_field_id': self.get_hidden_field_id(name),
            'point_stage_id': self.get_point_stage_id(name),
            'ppoi_id': self.get_ppoi_id(name),
            'sized_url': self.get_sized_url(value),
            'image_preview_id': self.image_preview_id(name),
        })
    return context
python
{ "resource": "" }
q271585
ClearableFileInputWithImagePreview.build_attrs
test
def build_attrs(self, base_attrs, extra_attrs=None):
    """Build an attribute dictionary.

    Returns a new dict: a copy of `base_attrs` overlaid with
    `extra_attrs` (when given).  Neither input is mutated.
    """
    merged = dict(base_attrs)
    if extra_attrs is not None:
        merged.update(extra_attrs)
    return merged
python
{ "resource": "" }
q271586
get_resized_path
test
def get_resized_path(path_to_image, width, height, filename_key, storage):
    """
    Return a `path_to_image` location on `storage` as dictated by
    `width`, `height` and `filename_key`.
    """
    containing_folder, filename = os.path.split(path_to_image)
    sized_filename = get_resized_filename(
        filename, width, height, filename_key
    )
    sized_path = os.path.join(
        VERSATILEIMAGEFIELD_SIZED_DIRNAME, containing_folder, sized_filename
    )
    # Removing spaces so this path is memcached friendly.
    return sized_path.replace(' ', '')
python
{ "resource": "" }
q271587
get_filtered_path
test
def get_filtered_path(path_to_image, filename_key, storage):
    """
    Return the 'filtered path' for `path_to_image` on `storage`.
    """
    containing_folder, filename = os.path.split(path_to_image)
    filtered_filename = get_filtered_filename(filename, filename_key)
    filtered_path = os.path.join(
        containing_folder,
        VERSATILEIMAGEFIELD_FILTERED_DIRNAME,
        filtered_filename
    )
    # Removing spaces so this path is memcached key friendly.
    return filtered_path.replace(' ', '')
python
{ "resource": "" }
q271588
validate_versatileimagefield_sizekey_list
test
def validate_versatileimagefield_sizekey_list(sizes):
    """
    Validate a list of size keys.

    `sizes`: An iterable of 2-tuples, both strings. Example::

        [
            ('large', 'url'),
            ('medium', 'crop__400x400'),
            ('small', 'thumbnail__100x100')
        ]

    Raises InvalidSizeKey for a malformed size key and InvalidSizeKeySet
    when `sizes` is not an iterable of 2-tuples.  Returns the
    de-duplicated list of entries.
    """
    try:
        for _, size_key in sizes:
            last_segment = size_key.split('__')[-1]
            # Valid keys end with either 'url' or a WxH dimension token.
            if last_segment != 'url' and 'x' not in last_segment:
                raise InvalidSizeKey(
                    "{0} is an invalid size. All sizes must be either "
                    "'url' or made up of at least two segments separated "
                    "by double underscores. Examples: 'crop__400x400', "
                    "filters__invert__url".format(size_key)
                )
    except ValueError:
        raise InvalidSizeKeySet(
            '{} is an invalid size key set. Size key sets must be an '
            'iterable of 2-tuples'.format(str(sizes))
        )
    return list(set(sizes))
python
{ "resource": "" }
q271589
get_url_from_image_key
test
def get_url_from_image_key(image_instance, image_key):
    """Build a URL from a double-underscore-delimited `image_key`.

    A trailing WxH segment (e.g. '400x400') selects a sized rendition;
    the remaining segments are resolved as chained attributes on
    `image_instance`.
    """
    segments = image_key.split('__')
    size_key = segments.pop(-1) if 'x' in segments[-1] else None
    target = reduce(getattr, segments, image_instance)
    if size_key:
        return target[size_key].url
    return target
python
{ "resource": "" }
q271590
get_rendition_key_set
test
def get_rendition_key_set(key):
    """
    Retrieve a validated and prepped Rendition Key Set from
    settings.VERSATILEIMAGEFIELD_RENDITION_KEY_SETS
    """
    if key not in IMAGE_SETS:
        raise ImproperlyConfigured(
            "No Rendition Key Set exists at "
            "settings.VERSATILEIMAGEFIELD_RENDITION_KEY_SETS['{}']".format(key)
        )
    return validate_versatileimagefield_sizekey_list(IMAGE_SETS[key])
python
{ "resource": "" }
q271591
format_instruction
test
def format_instruction(insn):
    """
    Takes a raw `Instruction` and translates it into a human readable
    text representation.

    As of writing, the text representation for WASM is not yet
    standardized, so we just emit some generic format: the mnemonic
    followed by comma-separated formatted immediates.
    """
    mnemonic = insn.op.mnemonic
    if not insn.imm:
        return mnemonic
    imm_struct = insn.op.imm_struct
    rendered = [
        getattr(imm_struct, field.name).to_string(
            getattr(insn.imm, field.name)
        )
        for field in imm_struct._meta.fields
    ]
    return mnemonic + ' ' + ', '.join(rendered)
python
{ "resource": "" }
q271592
format_function
test
def format_function(
    func_body,
    func_type=None,
    indent=2,
    format_locals=True,
):
    """
    Takes a `FunctionBody` and optionally a `FunctionType`, yielding the
    string representation of the function line by line. The function type
    is required for formatting function parameter and return value
    information.
    """
    if func_type is None:
        yield 'func'
    else:
        header = 'func'
        if func_type.param_types:
            header += ' (param {})'.format(
                ' '.join(map(format_lang_type, func_type.param_types))
            )
        if func_type.return_type:
            header += ' (result {})'.format(
                format_lang_type(func_type.return_type)
            )
        yield header

    if format_locals and func_body.locals:
        # Each locals entry declares `count` slots of the same type.
        expanded = itertools.chain.from_iterable(
            itertools.repeat(format_lang_type(entry.type), entry.count)
            for entry in func_body.locals
        )
        yield '(locals {})'.format(' '.join(expanded))

    # Track block nesting so instructions are indented by depth; `end`-style
    # instructions dedent before printing, block openers indent after.
    depth = 1
    for insn in decode_bytecode(func_body.code):
        if insn.op.flags & INSN_LEAVE_BLOCK:
            depth -= 1
        yield ' ' * (depth * indent) + format_instruction(insn)
        if insn.op.flags & INSN_ENTER_BLOCK:
            depth += 1
python
{ "resource": "" }
q271593
decode_bytecode
test
def decode_bytecode(bytecode):
    """Decodes raw bytecode, yielding `Instruction`s."""
    window = memoryview(bytecode)
    while window:
        opcode = OPCODE_MAP[byte2int(window[0])]
        imm = None
        imm_len = 0
        if opcode.imm_struct is not None:
            # Immediate operand follows the one-byte opcode.
            imm_len, imm, _ = opcode.imm_struct.from_raw(None, window[1:])
        total_len = 1 + imm_len
        yield Instruction(opcode, imm, total_len)
        window = window[total_len:]
python
{ "resource": "" }
q271594
decode_module
test
def decode_module(module, decode_name_subsections=False):
    """Decodes raw WASM modules, yielding `ModuleFragment`s."""
    wnd = memoryview(module)

    # The module header comes first.
    header = ModuleHeader()
    header_len, header_data, _ = header.from_raw(None, wnd)
    yield ModuleFragment(header, header_data)
    wnd = wnd[header_len:]

    # Then each section, in order.
    while wnd:
        section = Section()
        section_len, section_data, _ = section.from_raw(None, wnd)

        # If requested, decode name subsections when encountered.
        # (Short-circuit keeps `.name` from being touched on other sections.)
        if (
            decode_name_subsections and
            section_data.id == SEC_UNK and
            section_data.name == SEC_NAME
        ):
            payload_wnd = section_data.payload
            while payload_wnd:
                subsection = NameSubSection()
                sub_len, sub_data, _ = subsection.from_raw(None, payload_wnd)
                yield ModuleFragment(subsection, sub_data)
                payload_wnd = payload_wnd[sub_len:]
        else:
            yield ModuleFragment(section, section_data)

        wnd = wnd[section_len:]
python
{ "resource": "" }
q271595
deprecated_func
test
def deprecated_func(func):
    """Deprecates a function, printing a warning on the first usage.

    The `DeprecationWarning` is emitted only once per decorated function;
    subsequent calls pass straight through to `func`.
    """
    # We use a mutable container here to work around Py2's lack of
    # the `nonlocal` keyword.
    first_usage = [True]

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if first_usage[0]:
            warnings.warn(
                "Call to deprecated function {}.".format(func.__name__),
                DeprecationWarning,
                # Attribute the warning to the *caller* of the deprecated
                # function rather than to this wrapper.
                stacklevel=2,
            )
            first_usage[0] = False
        return func(*args, **kwargs)
    return wrapper
python
{ "resource": "" }
q271596
Manager.connect
test
def connect(self):
    """Connect to the server.

    Schedules `loop.create_connection` with the `protocol_factory`, `host`,
    `port` and `ssl` values from `self.config`, and registers
    `self.connection_made` to run when the attempt completes.

    :return asyncio.Task: task wrapping the pending connection attempt.
    """
    if self.loop is None:  # pragma: no cover
        self.loop = asyncio.get_event_loop()
    # `loop.create_task()` is the supported way to schedule a coroutine;
    # instantiating `asyncio.Task(coro, loop=...)` directly is deprecated.
    task = self.loop.create_task(
        self.loop.create_connection(
            self.config['protocol_factory'],
            self.config['host'],
            self.config['port'],
            ssl=self.config['ssl'])
    )
    task.add_done_callback(self.connection_made)
    return task
python
{ "resource": "" }
q271597
Manager.close
test
def close(self):
    """Close the connection, cancelling the pinger task first if present."""
    pinger = self.pinger
    if pinger:
        pinger.cancel()
        self.pinger = None
    # `protocol` may never have been set if no connection was established.
    if getattr(self, 'protocol', None):
        self.protocol.close()
python
{ "resource": "" }
q271598
Request._read_result
test
def _read_result(self):
    """Parse read a response from the AGI and parse it.

    :return dict: The AGI response parsed into a dict.
    """
    raw_line = yield from self.reader.readline()
    # Drop the trailing newline before handing the text to the parser.
    text = raw_line.decode(self.encoding)[:-1]
    return parse_agi_result(text)
python
{ "resource": "" }
q271599
Application.handler
test
def handler(self, reader, writer):
    """AsyncIO coroutine handler to launch socket listening.

    :Example:

    ::

        @asyncio.coroutine
        def start(request):
            print('Receive a FastAGI request')
            print(['AGI variables:', request.headers])

        fa_app = Application()
        fa_app.add_route('calls/start', start)
        coro = asyncio.start_server(fa_app.handler, '0.0.0.0', 4574)
        server = loop.run_until_complete(coro)

    See https://docs.python.org/3/library/asyncio-stream.html
    """
    # Read until the blank line terminating the AGI header block.
    buffer = b''
    while b'\n\n' not in buffer:
        chunk = yield from reader.read(self.buf_size)
        if not chunk:
            # Peer closed the connection before completing the headers;
            # without this check an EOF would spin this loop forever.
            log.error('Connection closed before AGI headers were complete')
            writer.close()
            return
        buffer += chunk
    # Parse only up to the terminator: anything the last read pulled in
    # past b'\n\n' is not header data.
    header_block, _, _ = buffer.partition(b'\n\n')
    lines = header_block.decode(self.default_encoding).split('\n')
    headers = OrderedDict([
        line.split(': ', 1) for line in lines if ': ' in line
    ])
    agi_network_script = headers.get('agi_network_script')
    log.info('Received FastAGI request from %r for "%s" route',
             writer.get_extra_info('peername'), agi_network_script)
    log.debug("Asterisk Headers: %r", headers)
    if agi_network_script is not None:
        route = self._route.get(agi_network_script)
        if route is not None:
            request = Request(app=self,
                              headers=headers,
                              reader=reader, writer=writer,
                              encoding=self.default_encoding)
            try:
                yield from route(request)
            except BaseException:
                # Deliberately broad: one failing route must not take the
                # whole server down; the traceback is logged for diagnosis.
                log.exception(
                    'An exception has been raised for the request "%s"',
                    agi_network_script
                )
        else:
            log.error('No route for the request "%s"', agi_network_script)
    else:
        log.error('No agi_network_script header for the request')
    log.debug("Closing client socket")
    writer.close()
python
{ "resource": "" }