repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_code_tokens
listlengths
15
672k
func_documentation_string
stringlengths
1
47.2k
func_documentation_tokens
listlengths
1
3.92k
split_name
stringclasses
1 value
func_code_url
stringlengths
85
339
twisted/mantissa
xmantissa/interstore.py
MessageQueue.routeMessage
def routeMessage(self, sender, target, value, messageID): """ Implement L{IMessageRouter.routeMessage} by locating a shared item which provides L{IMessageReceiver}, identified by L{target} in this L{MessageQueue}'s L{Store}, as shared to the specified C{sender}, then invoke its L{messageReceived} method. Then, take the results of that L{messageReceived} invocation and deliver them as an answer to the object specified by L{sender}. If any of these steps fail such that no L{IMessageReceiver.messageReceived} method may be invoked, generate a L{DELIVERY_ERROR} response instead. """ avatarName = sender.localpart + u"@" + sender.domain # Look for the sender. answer = self.store.findUnique( _AlreadyAnswered, AND(_AlreadyAnswered.originalSender == sender, _AlreadyAnswered.messageID == messageID), default=None) if answer is None: role = getPrimaryRole(self.store, avatarName) try: receiver = role.getShare(target.shareID) except NoSuchShare: response = Value(DELIVERY_ERROR, ERROR_NO_SHARE) else: try: def txn(): output = receiver.messageReceived(value, sender, target) if not isinstance(output, Value): raise TypeError("%r returned non-Value %r" % (receiver, output)) return output response = self.store.transact(txn) except RevertAndRespond, rar: response = rar.value except: log.err(Failure(), "An error occurred during inter-store " "message delivery.") response = Value(DELIVERY_ERROR, ERROR_REMOTE_EXCEPTION) answer = _AlreadyAnswered.create(store=self.store, originalSender=sender, originalTarget=target, messageID=messageID, value=response) self._deliverAnswer(answer) self._scheduleMePlease()
python
def routeMessage(self, sender, target, value, messageID): """ Implement L{IMessageRouter.routeMessage} by locating a shared item which provides L{IMessageReceiver}, identified by L{target} in this L{MessageQueue}'s L{Store}, as shared to the specified C{sender}, then invoke its L{messageReceived} method. Then, take the results of that L{messageReceived} invocation and deliver them as an answer to the object specified by L{sender}. If any of these steps fail such that no L{IMessageReceiver.messageReceived} method may be invoked, generate a L{DELIVERY_ERROR} response instead. """ avatarName = sender.localpart + u"@" + sender.domain # Look for the sender. answer = self.store.findUnique( _AlreadyAnswered, AND(_AlreadyAnswered.originalSender == sender, _AlreadyAnswered.messageID == messageID), default=None) if answer is None: role = getPrimaryRole(self.store, avatarName) try: receiver = role.getShare(target.shareID) except NoSuchShare: response = Value(DELIVERY_ERROR, ERROR_NO_SHARE) else: try: def txn(): output = receiver.messageReceived(value, sender, target) if not isinstance(output, Value): raise TypeError("%r returned non-Value %r" % (receiver, output)) return output response = self.store.transact(txn) except RevertAndRespond, rar: response = rar.value except: log.err(Failure(), "An error occurred during inter-store " "message delivery.") response = Value(DELIVERY_ERROR, ERROR_REMOTE_EXCEPTION) answer = _AlreadyAnswered.create(store=self.store, originalSender=sender, originalTarget=target, messageID=messageID, value=response) self._deliverAnswer(answer) self._scheduleMePlease()
[ "def", "routeMessage", "(", "self", ",", "sender", ",", "target", ",", "value", ",", "messageID", ")", ":", "avatarName", "=", "sender", ".", "localpart", "+", "u\"@\"", "+", "sender", ".", "domain", "# Look for the sender.", "answer", "=", "self", ".", "s...
Implement L{IMessageRouter.routeMessage} by locating a shared item which provides L{IMessageReceiver}, identified by L{target} in this L{MessageQueue}'s L{Store}, as shared to the specified C{sender}, then invoke its L{messageReceived} method. Then, take the results of that L{messageReceived} invocation and deliver them as an answer to the object specified by L{sender}. If any of these steps fail such that no L{IMessageReceiver.messageReceived} method may be invoked, generate a L{DELIVERY_ERROR} response instead.
[ "Implement", "L", "{", "IMessageRouter", ".", "routeMessage", "}", "by", "locating", "a", "shared", "item", "which", "provides", "L", "{", "IMessageReceiver", "}", "identified", "by", "L", "{", "target", "}", "in", "this", "L", "{", "MessageQueue", "}", "s...
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L549-L598
twisted/mantissa
xmantissa/interstore.py
MessageQueue._deliverAnswer
def _deliverAnswer(self, answer): """ Attempt to deliver an answer to a message sent to this store, via my store's parent's L{IMessageRouter} powerup. @param answer: an L{AlreadyAnswered} that contains an answer to a message sent to this store. """ router = self.siteRouter if answer.deliveryDeferred is None: d = answer.deliveryDeferred = router.routeAnswer( answer.originalSender, answer.originalTarget, answer.value, answer.messageID) def destroyAnswer(result): answer.deleteFromStore() def transportErrorCheck(f): answer.deliveryDeferred = None f.trap(MessageTransportError) d.addCallbacks(destroyAnswer, transportErrorCheck) d.addErrback(log.err)
python
def _deliverAnswer(self, answer): """ Attempt to deliver an answer to a message sent to this store, via my store's parent's L{IMessageRouter} powerup. @param answer: an L{AlreadyAnswered} that contains an answer to a message sent to this store. """ router = self.siteRouter if answer.deliveryDeferred is None: d = answer.deliveryDeferred = router.routeAnswer( answer.originalSender, answer.originalTarget, answer.value, answer.messageID) def destroyAnswer(result): answer.deleteFromStore() def transportErrorCheck(f): answer.deliveryDeferred = None f.trap(MessageTransportError) d.addCallbacks(destroyAnswer, transportErrorCheck) d.addErrback(log.err)
[ "def", "_deliverAnswer", "(", "self", ",", "answer", ")", ":", "router", "=", "self", ".", "siteRouter", "if", "answer", ".", "deliveryDeferred", "is", "None", ":", "d", "=", "answer", ".", "deliveryDeferred", "=", "router", ".", "routeAnswer", "(", "answe...
Attempt to deliver an answer to a message sent to this store, via my store's parent's L{IMessageRouter} powerup. @param answer: an L{AlreadyAnswered} that contains an answer to a message sent to this store.
[ "Attempt", "to", "deliver", "an", "answer", "to", "a", "message", "sent", "to", "this", "store", "via", "my", "store", "s", "parent", "s", "L", "{", "IMessageRouter", "}", "powerup", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L601-L620
twisted/mantissa
xmantissa/interstore.py
MessageQueue.routeAnswer
def routeAnswer(self, originalSender, originalTarget, value, messageID): """ Route an incoming answer to a message originally sent by this queue. """ def txn(): qm = self._messageFromSender(originalSender, messageID) if qm is None: return c = qm.consequence if c is not None: c.answerReceived(value, qm.value, qm.sender, qm.target) elif value.type == DELIVERY_ERROR: try: raise MessageTransportError(value.data) except MessageTransportError: log.err(Failure(), "An unhandled delivery error occurred on a message" " with no consequence.") qm.deleteFromStore() try: self.store.transact(txn) except: log.err(Failure(), "An unhandled error occurred while handling a response to " "an inter-store message.") def answerProcessingFailure(): qm = self._messageFromSender(originalSender, messageID) _FailedAnswer.create(store=qm.store, consequence=qm.consequence, sender=originalSender, target=originalTarget, messageValue=qm.value, answerValue=value) qm.deleteFromStore() self.store.transact(answerProcessingFailure) return defer.succeed(None)
python
def routeAnswer(self, originalSender, originalTarget, value, messageID): """ Route an incoming answer to a message originally sent by this queue. """ def txn(): qm = self._messageFromSender(originalSender, messageID) if qm is None: return c = qm.consequence if c is not None: c.answerReceived(value, qm.value, qm.sender, qm.target) elif value.type == DELIVERY_ERROR: try: raise MessageTransportError(value.data) except MessageTransportError: log.err(Failure(), "An unhandled delivery error occurred on a message" " with no consequence.") qm.deleteFromStore() try: self.store.transact(txn) except: log.err(Failure(), "An unhandled error occurred while handling a response to " "an inter-store message.") def answerProcessingFailure(): qm = self._messageFromSender(originalSender, messageID) _FailedAnswer.create(store=qm.store, consequence=qm.consequence, sender=originalSender, target=originalTarget, messageValue=qm.value, answerValue=value) qm.deleteFromStore() self.store.transact(answerProcessingFailure) return defer.succeed(None)
[ "def", "routeAnswer", "(", "self", ",", "originalSender", ",", "originalTarget", ",", "value", ",", "messageID", ")", ":", "def", "txn", "(", ")", ":", "qm", "=", "self", ".", "_messageFromSender", "(", "originalSender", ",", "messageID", ")", "if", "qm", ...
Route an incoming answer to a message originally sent by this queue.
[ "Route", "an", "incoming", "answer", "to", "a", "message", "originally", "sent", "by", "this", "queue", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L623-L659
twisted/mantissa
xmantissa/interstore.py
MessageQueue._messageFromSender
def _messageFromSender(self, sender, messageID): """ Locate a previously queued message by a given sender and messageID. """ return self.store.findUnique( _QueuedMessage, AND(_QueuedMessage.senderUsername == sender.localpart, _QueuedMessage.senderDomain == sender.domain, _QueuedMessage.messageID == messageID), default=None)
python
def _messageFromSender(self, sender, messageID): """ Locate a previously queued message by a given sender and messageID. """ return self.store.findUnique( _QueuedMessage, AND(_QueuedMessage.senderUsername == sender.localpart, _QueuedMessage.senderDomain == sender.domain, _QueuedMessage.messageID == messageID), default=None)
[ "def", "_messageFromSender", "(", "self", ",", "sender", ",", "messageID", ")", ":", "return", "self", ".", "store", ".", "findUnique", "(", "_QueuedMessage", ",", "AND", "(", "_QueuedMessage", ".", "senderUsername", "==", "sender", ".", "localpart", ",", "_...
Locate a previously queued message by a given sender and messageID.
[ "Locate", "a", "previously", "queued", "message", "by", "a", "given", "sender", "and", "messageID", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L662-L671
twisted/mantissa
xmantissa/interstore.py
MessageQueue._verifySender
def _verifySender(self, sender): """ Verify that this sender is valid. """ if self.store.findFirst( LoginMethod, AND(LoginMethod.localpart == sender.localpart, LoginMethod.domain == sender.domain, LoginMethod.internal == True)) is None: raise BadSender(sender.localpart + u'@' + sender.domain, [lm.localpart + u'@' + lm.domain for lm in self.store.query( LoginMethod, LoginMethod.internal == True)])
python
def _verifySender(self, sender): """ Verify that this sender is valid. """ if self.store.findFirst( LoginMethod, AND(LoginMethod.localpart == sender.localpart, LoginMethod.domain == sender.domain, LoginMethod.internal == True)) is None: raise BadSender(sender.localpart + u'@' + sender.domain, [lm.localpart + u'@' + lm.domain for lm in self.store.query( LoginMethod, LoginMethod.internal == True)])
[ "def", "_verifySender", "(", "self", ",", "sender", ")", ":", "if", "self", ".", "store", ".", "findFirst", "(", "LoginMethod", ",", "AND", "(", "LoginMethod", ".", "localpart", "==", "sender", ".", "localpart", ",", "LoginMethod", ".", "domain", "==", "...
Verify that this sender is valid.
[ "Verify", "that", "this", "sender", "is", "valid", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L674-L686
twisted/mantissa
xmantissa/interstore.py
MessageQueue.queueMessage
def queueMessage(self, sender, target, value, consequence=None): """ Queue a persistent outgoing message. @param sender: The a description of the shared item that is the sender of the message. @type sender: L{xmantissa.sharing.Identifier} @param target: The a description of the shared item that is the target of the message. @type target: L{xmantissa.sharing.Identifier} @param consequence: an item stored in the same database as this L{MessageQueue} implementing L{IDeliveryConsequence}. """ self.messageCounter += 1 _QueuedMessage.create(store=self.store, sender=sender, target=target, value=value, messageID=self.messageCounter, consequence=consequence) self._scheduleMePlease()
python
def queueMessage(self, sender, target, value, consequence=None): """ Queue a persistent outgoing message. @param sender: The a description of the shared item that is the sender of the message. @type sender: L{xmantissa.sharing.Identifier} @param target: The a description of the shared item that is the target of the message. @type target: L{xmantissa.sharing.Identifier} @param consequence: an item stored in the same database as this L{MessageQueue} implementing L{IDeliveryConsequence}. """ self.messageCounter += 1 _QueuedMessage.create(store=self.store, sender=sender, target=target, value=value, messageID=self.messageCounter, consequence=consequence) self._scheduleMePlease()
[ "def", "queueMessage", "(", "self", ",", "sender", ",", "target", ",", "value", ",", "consequence", "=", "None", ")", ":", "self", ".", "messageCounter", "+=", "1", "_QueuedMessage", ".", "create", "(", "store", "=", "self", ".", "store", ",", "sender", ...
Queue a persistent outgoing message. @param sender: The a description of the shared item that is the sender of the message. @type sender: L{xmantissa.sharing.Identifier} @param target: The a description of the shared item that is the target of the message. @type target: L{xmantissa.sharing.Identifier} @param consequence: an item stored in the same database as this L{MessageQueue} implementing L{IDeliveryConsequence}.
[ "Queue", "a", "persistent", "outgoing", "message", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L689-L712
twisted/mantissa
xmantissa/interstore.py
MessageQueue.run
def run(self): """ Attmept to deliver the first outgoing L{QueuedMessage}; return a time to reschedule if there are still more retries or outgoing messages to send. """ delay = None router = self.siteRouter for qmsg in self.store.query(_QueuedMessage, sort=_QueuedMessage.storeID.ascending): try: self._verifySender(qmsg.sender) except: self.routeAnswer(qmsg.sender, qmsg.target, Value(DELIVERY_ERROR, ERROR_BAD_SENDER), qmsg.messageID) log.err(Failure(), "Could not verify sender for sending message.") else: router.routeMessage(qmsg.sender, qmsg.target, qmsg.value, qmsg.messageID) for answer in self.store.query(_AlreadyAnswered, sort=_AlreadyAnswered.storeID.ascending): self._deliverAnswer(answer) nextmsg = self.store.findFirst(_QueuedMessage, default=None) if nextmsg is not None: delay = _RETRANSMIT_DELAY else: nextanswer = self.store.findFirst(_AlreadyAnswered, default=None) if nextanswer is not None: delay = _RETRANSMIT_DELAY if delay is not None: return IScheduler(self.store).now() + timedelta(seconds=delay)
python
def run(self): """ Attmept to deliver the first outgoing L{QueuedMessage}; return a time to reschedule if there are still more retries or outgoing messages to send. """ delay = None router = self.siteRouter for qmsg in self.store.query(_QueuedMessage, sort=_QueuedMessage.storeID.ascending): try: self._verifySender(qmsg.sender) except: self.routeAnswer(qmsg.sender, qmsg.target, Value(DELIVERY_ERROR, ERROR_BAD_SENDER), qmsg.messageID) log.err(Failure(), "Could not verify sender for sending message.") else: router.routeMessage(qmsg.sender, qmsg.target, qmsg.value, qmsg.messageID) for answer in self.store.query(_AlreadyAnswered, sort=_AlreadyAnswered.storeID.ascending): self._deliverAnswer(answer) nextmsg = self.store.findFirst(_QueuedMessage, default=None) if nextmsg is not None: delay = _RETRANSMIT_DELAY else: nextanswer = self.store.findFirst(_AlreadyAnswered, default=None) if nextanswer is not None: delay = _RETRANSMIT_DELAY if delay is not None: return IScheduler(self.store).now() + timedelta(seconds=delay)
[ "def", "run", "(", "self", ")", ":", "delay", "=", "None", "router", "=", "self", ".", "siteRouter", "for", "qmsg", "in", "self", ".", "store", ".", "query", "(", "_QueuedMessage", ",", "sort", "=", "_QueuedMessage", ".", "storeID", ".", "ascending", "...
Attmept to deliver the first outgoing L{QueuedMessage}; return a time to reschedule if there are still more retries or outgoing messages to send.
[ "Attmept", "to", "deliver", "the", "first", "outgoing", "L", "{", "QueuedMessage", "}", ";", "return", "a", "time", "to", "reschedule", "if", "there", "are", "still", "more", "retries", "or", "outgoing", "messages", "to", "send", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L715-L748
twisted/mantissa
xmantissa/interstore.py
_ProtoAttributeArgument.fromBox
def fromBox(self, name, strings, objects, proto): """ Retreive an attribute from the C{proto} parameter. """ objects[name] = getattr(proto, self.attr)
python
def fromBox(self, name, strings, objects, proto): """ Retreive an attribute from the C{proto} parameter. """ objects[name] = getattr(proto, self.attr)
[ "def", "fromBox", "(", "self", ",", "name", ",", "strings", ",", "objects", ",", "proto", ")", ":", "objects", "[", "name", "]", "=", "getattr", "(", "proto", ",", "self", ".", "attr", ")" ]
Retreive an attribute from the C{proto} parameter.
[ "Retreive", "an", "attribute", "from", "the", "C", "{", "proto", "}", "parameter", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L781-L785
twisted/mantissa
xmantissa/interstore.py
AMPMessenger.messageRemote
def messageRemote(self, cmdObj, consequence=None, **args): """ Send a message to the peer identified by the target, via the given L{Command} object and arguments. @param cmdObj: a L{twisted.protocols.amp.Command}, whose serialized form will be the message. @param consequence: an L{IDeliveryConsequence} provider which will handle the result of this message (or None, if no response processing is desired). @param args: keyword arguments which match the C{cmdObj}'s arguments list. @return: L{None} """ messageBox = cmdObj.makeArguments(args, self) messageBox[COMMAND] = cmdObj.commandName messageData = messageBox.serialize() self.queue.queueMessage(self.sender, self.target, Value(AMP_MESSAGE_TYPE, messageData), consequence)
python
def messageRemote(self, cmdObj, consequence=None, **args): """ Send a message to the peer identified by the target, via the given L{Command} object and arguments. @param cmdObj: a L{twisted.protocols.amp.Command}, whose serialized form will be the message. @param consequence: an L{IDeliveryConsequence} provider which will handle the result of this message (or None, if no response processing is desired). @param args: keyword arguments which match the C{cmdObj}'s arguments list. @return: L{None} """ messageBox = cmdObj.makeArguments(args, self) messageBox[COMMAND] = cmdObj.commandName messageData = messageBox.serialize() self.queue.queueMessage(self.sender, self.target, Value(AMP_MESSAGE_TYPE, messageData), consequence)
[ "def", "messageRemote", "(", "self", ",", "cmdObj", ",", "consequence", "=", "None", ",", "*", "*", "args", ")", ":", "messageBox", "=", "cmdObj", ".", "makeArguments", "(", "args", ",", "self", ")", "messageBox", "[", "COMMAND", "]", "=", "cmdObj", "....
Send a message to the peer identified by the target, via the given L{Command} object and arguments. @param cmdObj: a L{twisted.protocols.amp.Command}, whose serialized form will be the message. @param consequence: an L{IDeliveryConsequence} provider which will handle the result of this message (or None, if no response processing is desired). @param args: keyword arguments which match the C{cmdObj}'s arguments list. @return: L{None}
[ "Send", "a", "message", "to", "the", "peer", "identified", "by", "the", "target", "via", "the", "given", "L", "{", "Command", "}", "object", "and", "arguments", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L840-L862
twisted/mantissa
xmantissa/interstore.py
_AMPExposer.expose
def expose(self, commandObject): """ Declare a method as being related to the given command object. @param commandObject: a L{Command} subclass. """ thunk = super(_AMPExposer, self).expose(commandObject.commandName) def thunkplus(function): result = thunk(function) result.command = commandObject return result return thunkplus
python
def expose(self, commandObject): """ Declare a method as being related to the given command object. @param commandObject: a L{Command} subclass. """ thunk = super(_AMPExposer, self).expose(commandObject.commandName) def thunkplus(function): result = thunk(function) result.command = commandObject return result return thunkplus
[ "def", "expose", "(", "self", ",", "commandObject", ")", ":", "thunk", "=", "super", "(", "_AMPExposer", ",", "self", ")", ".", "expose", "(", "commandObject", ".", "commandName", ")", "def", "thunkplus", "(", "function", ")", ":", "result", "=", "thunk"...
Declare a method as being related to the given command object. @param commandObject: a L{Command} subclass.
[ "Declare", "a", "method", "as", "being", "related", "to", "the", "given", "command", "object", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L871-L882
twisted/mantissa
xmantissa/interstore.py
_AMPExposer.responderForName
def responderForName(self, instance, commandName): """ When resolving a command to a method from the wire, the information available is the command's name; look up a command. @param instance: an instance of a class who has methods exposed via this exposer's L{_AMPExposer.expose} method. @param commandName: the C{commandName} attribute of a L{Command} exposed on the given instance. @return: a bound method with a C{command} attribute. """ method = super(_AMPExposer, self).get(instance, commandName) return method
python
def responderForName(self, instance, commandName): """ When resolving a command to a method from the wire, the information available is the command's name; look up a command. @param instance: an instance of a class who has methods exposed via this exposer's L{_AMPExposer.expose} method. @param commandName: the C{commandName} attribute of a L{Command} exposed on the given instance. @return: a bound method with a C{command} attribute. """ method = super(_AMPExposer, self).get(instance, commandName) return method
[ "def", "responderForName", "(", "self", ",", "instance", ",", "commandName", ")", ":", "method", "=", "super", "(", "_AMPExposer", ",", "self", ")", ".", "get", "(", "instance", ",", "commandName", ")", "return", "method" ]
When resolving a command to a method from the wire, the information available is the command's name; look up a command. @param instance: an instance of a class who has methods exposed via this exposer's L{_AMPExposer.expose} method. @param commandName: the C{commandName} attribute of a L{Command} exposed on the given instance. @return: a bound method with a C{command} attribute.
[ "When", "resolving", "a", "command", "to", "a", "method", "from", "the", "wire", "the", "information", "available", "is", "the", "command", "s", "name", ";", "look", "up", "a", "command", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L885-L899
twisted/mantissa
xmantissa/interstore.py
_AMPErrorExposer.expose
def expose(self, commandObject, exceptionType): """ Expose a function for processing a given AMP error. """ thunk = super(_AMPErrorExposer, self).expose( (commandObject.commandName, commandObject.errors.get(exceptionType))) def thunkplus(function): result = thunk(function) result.command = commandObject result.exception = exceptionType return result return thunkplus
python
def expose(self, commandObject, exceptionType): """ Expose a function for processing a given AMP error. """ thunk = super(_AMPErrorExposer, self).expose( (commandObject.commandName, commandObject.errors.get(exceptionType))) def thunkplus(function): result = thunk(function) result.command = commandObject result.exception = exceptionType return result return thunkplus
[ "def", "expose", "(", "self", ",", "commandObject", ",", "exceptionType", ")", ":", "thunk", "=", "super", "(", "_AMPErrorExposer", ",", "self", ")", ".", "expose", "(", "(", "commandObject", ".", "commandName", ",", "commandObject", ".", "errors", ".", "g...
Expose a function for processing a given AMP error.
[ "Expose", "a", "function", "for", "processing", "a", "given", "AMP", "error", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L909-L921
twisted/mantissa
xmantissa/interstore.py
_AMPErrorExposer.errbackForName
def errbackForName(self, instance, commandName, errorName): """ Retrieve an errback - a callable object that accepts a L{Failure} as an argument - that is exposed on the given instance, given an AMP commandName and a name in that command's error mapping. """ return super(_AMPErrorExposer, self).get(instance, (commandName, errorName))
python
def errbackForName(self, instance, commandName, errorName): """ Retrieve an errback - a callable object that accepts a L{Failure} as an argument - that is exposed on the given instance, given an AMP commandName and a name in that command's error mapping. """ return super(_AMPErrorExposer, self).get(instance, (commandName, errorName))
[ "def", "errbackForName", "(", "self", ",", "instance", ",", "commandName", ",", "errorName", ")", ":", "return", "super", "(", "_AMPErrorExposer", ",", "self", ")", ".", "get", "(", "instance", ",", "(", "commandName", ",", "errorName", ")", ")" ]
Retrieve an errback - a callable object that accepts a L{Failure} as an argument - that is exposed on the given instance, given an AMP commandName and a name in that command's error mapping.
[ "Retrieve", "an", "errback", "-", "a", "callable", "object", "that", "accepts", "a", "L", "{", "Failure", "}", "as", "an", "argument", "-", "that", "is", "exposed", "on", "the", "given", "instance", "given", "an", "AMP", "commandName", "and", "a", "name"...
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L924-L930
twisted/mantissa
xmantissa/interstore.py
AMPReceiver._boxFromData
def _boxFromData(self, messageData): """ A box. @param messageData: a serialized AMP box representing either a message or an error. @type messageData: L{str} @raise MalformedMessage: if the C{messageData} parameter does not parse to exactly one AMP box. """ inputBoxes = parseString(messageData) if not len(inputBoxes) == 1: raise MalformedMessage() [inputBox] = inputBoxes return inputBox
python
def _boxFromData(self, messageData): """ A box. @param messageData: a serialized AMP box representing either a message or an error. @type messageData: L{str} @raise MalformedMessage: if the C{messageData} parameter does not parse to exactly one AMP box. """ inputBoxes = parseString(messageData) if not len(inputBoxes) == 1: raise MalformedMessage() [inputBox] = inputBoxes return inputBox
[ "def", "_boxFromData", "(", "self", ",", "messageData", ")", ":", "inputBoxes", "=", "parseString", "(", "messageData", ")", "if", "not", "len", "(", "inputBoxes", ")", "==", "1", ":", "raise", "MalformedMessage", "(", ")", "[", "inputBox", "]", "=", "in...
A box. @param messageData: a serialized AMP box representing either a message or an error. @type messageData: L{str} @raise MalformedMessage: if the C{messageData} parameter does not parse to exactly one AMP box.
[ "A", "box", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L994-L1009
twisted/mantissa
xmantissa/interstore.py
AMPReceiver.messageReceived
def messageReceived(self, value, sender, target): """ An AMP-formatted message was received. Dispatch to the appropriate command responder, i.e. a method on this object exposed with L{commandMethod.expose}. @see IMessageReceiver.messageReceived """ if value.type != AMP_MESSAGE_TYPE: raise UnknownMessageType() inputBox = self._boxFromData(value.data) thunk = commandMethod.responderForName(self, inputBox[COMMAND]) placeholder = _ProtocolPlaceholder(sender, target) arguments = thunk.command.parseArguments(inputBox, placeholder) try: result = thunk(**arguments) except tuple(thunk.command.errors.keys()), knownError: errorCode = thunk.command.errors[knownError.__class__] raise RevertAndRespond( Value(AMP_ANSWER_TYPE, Box(_error_code=errorCode, _error_description=str(knownError)).serialize())) else: response = thunk.command.makeResponse(result, None) return Value(AMP_ANSWER_TYPE, response.serialize())
python
def messageReceived(self, value, sender, target): """ An AMP-formatted message was received. Dispatch to the appropriate command responder, i.e. a method on this object exposed with L{commandMethod.expose}. @see IMessageReceiver.messageReceived """ if value.type != AMP_MESSAGE_TYPE: raise UnknownMessageType() inputBox = self._boxFromData(value.data) thunk = commandMethod.responderForName(self, inputBox[COMMAND]) placeholder = _ProtocolPlaceholder(sender, target) arguments = thunk.command.parseArguments(inputBox, placeholder) try: result = thunk(**arguments) except tuple(thunk.command.errors.keys()), knownError: errorCode = thunk.command.errors[knownError.__class__] raise RevertAndRespond( Value(AMP_ANSWER_TYPE, Box(_error_code=errorCode, _error_description=str(knownError)).serialize())) else: response = thunk.command.makeResponse(result, None) return Value(AMP_ANSWER_TYPE, response.serialize())
[ "def", "messageReceived", "(", "self", ",", "value", ",", "sender", ",", "target", ")", ":", "if", "value", ".", "type", "!=", "AMP_MESSAGE_TYPE", ":", "raise", "UnknownMessageType", "(", ")", "inputBox", "=", "self", ".", "_boxFromData", "(", "value", "."...
An AMP-formatted message was received. Dispatch to the appropriate command responder, i.e. a method on this object exposed with L{commandMethod.expose}. @see IMessageReceiver.messageReceived
[ "An", "AMP", "-", "formatted", "message", "was", "received", ".", "Dispatch", "to", "the", "appropriate", "command", "responder", "i", ".", "e", ".", "a", "method", "on", "this", "object", "exposed", "with", "L", "{", "commandMethod", ".", "expose", "}", ...
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L1012-L1036
twisted/mantissa
xmantissa/interstore.py
AMPReceiver.answerReceived
def answerReceived(self, value, originalValue, originalSender, originalTarget): """ An answer was received. Dispatch to the appropriate answer responder, i.e. a method on this object exposed with L{answerMethod.expose}. @see IDeliveryConsequence.answerReceived """ if value.type != AMP_ANSWER_TYPE: raise UnknownMessageType() commandName = self._boxFromData(originalValue.data)[COMMAND] rawArgs = self._boxFromData(value.data) placeholder = _ProtocolPlaceholder(originalSender, originalTarget) if ERROR in rawArgs: thunk = errorMethod.errbackForName(self, commandName, rawArgs[ERROR]) thunk(Failure(thunk.exception())) else: thunk = answerMethod.responderForName(self, commandName) arguments = thunk.command.parseResponse(rawArgs, placeholder) thunk(**arguments)
python
def answerReceived(self, value, originalValue, originalSender, originalTarget): """ An answer was received. Dispatch to the appropriate answer responder, i.e. a method on this object exposed with L{answerMethod.expose}. @see IDeliveryConsequence.answerReceived """ if value.type != AMP_ANSWER_TYPE: raise UnknownMessageType() commandName = self._boxFromData(originalValue.data)[COMMAND] rawArgs = self._boxFromData(value.data) placeholder = _ProtocolPlaceholder(originalSender, originalTarget) if ERROR in rawArgs: thunk = errorMethod.errbackForName(self, commandName, rawArgs[ERROR]) thunk(Failure(thunk.exception())) else: thunk = answerMethod.responderForName(self, commandName) arguments = thunk.command.parseResponse(rawArgs, placeholder) thunk(**arguments)
[ "def", "answerReceived", "(", "self", ",", "value", ",", "originalValue", ",", "originalSender", ",", "originalTarget", ")", ":", "if", "value", ".", "type", "!=", "AMP_ANSWER_TYPE", ":", "raise", "UnknownMessageType", "(", ")", "commandName", "=", "self", "."...
An answer was received. Dispatch to the appropriate answer responder, i.e. a method on this object exposed with L{answerMethod.expose}. @see IDeliveryConsequence.answerReceived
[ "An", "answer", "was", "received", ".", "Dispatch", "to", "the", "appropriate", "answer", "responder", "i", ".", "e", ".", "a", "method", "on", "this", "object", "exposed", "with", "L", "{", "answerMethod", ".", "expose", "}", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/interstore.py#L1039-L1058
xav-b/pyconsul
pyconsul/factory.py
Consultant._get
def _get(self, resource, payload=None): ''' Wrapper around requests.get that shorten caller url and takes care of errors ''' # Avoid dangerous default function argument `{}` payload = payload or {} # Build the request and return json response return requests.get( '{}/{}/{}'.format( self.master, pyconsul.__consul_api_version__, resource), params=payload )
python
def _get(self, resource, payload=None): ''' Wrapper around requests.get that shorten caller url and takes care of errors ''' # Avoid dangerous default function argument `{}` payload = payload or {} # Build the request and return json response return requests.get( '{}/{}/{}'.format( self.master, pyconsul.__consul_api_version__, resource), params=payload )
[ "def", "_get", "(", "self", ",", "resource", ",", "payload", "=", "None", ")", ":", "# Avoid dangerous default function argument `{}`", "payload", "=", "payload", "or", "{", "}", "# Build the request and return json response", "return", "requests", ".", "get", "(", ...
Wrapper around requests.get that shorten caller url and takes care of errors
[ "Wrapper", "around", "requests", ".", "get", "that", "shorten", "caller", "url", "and", "takes", "care", "of", "errors" ]
train
https://github.com/xav-b/pyconsul/blob/06ce3b921d01010c19643424486bea4b22196076/pyconsul/factory.py#L34-L44
xav-b/pyconsul
pyconsul/factory.py
Consultant._put
def _put(self, resource, payload=None): ''' Wrapper around requests.put that shorten caller url and takes care of errors ''' # Avoid dangerous default function argument `{}` payload = payload or {} # Build the request and return json response return requests.put( '{}/{}/{}'.format( self.master, pyconsul.__consul_api_version__, resource), params=payload )
python
def _put(self, resource, payload=None): ''' Wrapper around requests.put that shorten caller url and takes care of errors ''' # Avoid dangerous default function argument `{}` payload = payload or {} # Build the request and return json response return requests.put( '{}/{}/{}'.format( self.master, pyconsul.__consul_api_version__, resource), params=payload )
[ "def", "_put", "(", "self", ",", "resource", ",", "payload", "=", "None", ")", ":", "# Avoid dangerous default function argument `{}`", "payload", "=", "payload", "or", "{", "}", "# Build the request and return json response", "return", "requests", ".", "put", "(", ...
Wrapper around requests.put that shorten caller url and takes care of errors
[ "Wrapper", "around", "requests", ".", "put", "that", "shorten", "caller", "url", "and", "takes", "care", "of", "errors" ]
train
https://github.com/xav-b/pyconsul/blob/06ce3b921d01010c19643424486bea4b22196076/pyconsul/factory.py#L47-L57
openvax/datacache
datacache/database.py
Database.table_names
def table_names(self): """Returns names of all tables in the database""" query = "SELECT name FROM sqlite_master WHERE type='table'" cursor = self.connection.execute(query) results = cursor.fetchall() return [result_tuple[0] for result_tuple in results]
python
def table_names(self): """Returns names of all tables in the database""" query = "SELECT name FROM sqlite_master WHERE type='table'" cursor = self.connection.execute(query) results = cursor.fetchall() return [result_tuple[0] for result_tuple in results]
[ "def", "table_names", "(", "self", ")", ":", "query", "=", "\"SELECT name FROM sqlite_master WHERE type='table'\"", "cursor", "=", "self", ".", "connection", ".", "execute", "(", "query", ")", "results", "=", "cursor", ".", "fetchall", "(", ")", "return", "[", ...
Returns names of all tables in the database
[ "Returns", "names", "of", "all", "tables", "in", "the", "database" ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/database.py#L48-L53
openvax/datacache
datacache/database.py
Database.drop_all_tables
def drop_all_tables(self): """Drop all tables in the database""" for table_name in self.table_names(): self.execute_sql("DROP TABLE %s" % table_name) self.connection.commit()
python
def drop_all_tables(self): """Drop all tables in the database""" for table_name in self.table_names(): self.execute_sql("DROP TABLE %s" % table_name) self.connection.commit()
[ "def", "drop_all_tables", "(", "self", ")", ":", "for", "table_name", "in", "self", ".", "table_names", "(", ")", ":", "self", ".", "execute_sql", "(", "\"DROP TABLE %s\"", "%", "table_name", ")", "self", ".", "connection", ".", "commit", "(", ")" ]
Drop all tables in the database
[ "Drop", "all", "tables", "in", "the", "database" ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/database.py#L60-L64
openvax/datacache
datacache/database.py
Database.execute_sql
def execute_sql(self, sql, commit=False): """Log and then execute a SQL query""" logger.info("Running sqlite query: \"%s\"", sql) self.connection.execute(sql) if commit: self.connection.commit()
python
def execute_sql(self, sql, commit=False): """Log and then execute a SQL query""" logger.info("Running sqlite query: \"%s\"", sql) self.connection.execute(sql) if commit: self.connection.commit()
[ "def", "execute_sql", "(", "self", ",", "sql", ",", "commit", "=", "False", ")", ":", "logger", ".", "info", "(", "\"Running sqlite query: \\\"%s\\\"\"", ",", "sql", ")", "self", ".", "connection", ".", "execute", "(", "sql", ")", "if", "commit", ":", "s...
Log and then execute a SQL query
[ "Log", "and", "then", "execute", "a", "SQL", "query" ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/database.py#L66-L71
openvax/datacache
datacache/database.py
Database.version
def version(self): """What's the version of this database? Found in metadata attached by datacache when creating this database.""" query = "SELECT version FROM %s" % METADATA_TABLE_NAME cursor = self.connection.execute(query) version = cursor.fetchone() if not version: return 0 else: return int(version[0])
python
def version(self): """What's the version of this database? Found in metadata attached by datacache when creating this database.""" query = "SELECT version FROM %s" % METADATA_TABLE_NAME cursor = self.connection.execute(query) version = cursor.fetchone() if not version: return 0 else: return int(version[0])
[ "def", "version", "(", "self", ")", ":", "query", "=", "\"SELECT version FROM %s\"", "%", "METADATA_TABLE_NAME", "cursor", "=", "self", ".", "connection", ".", "execute", "(", "query", ")", "version", "=", "cursor", ".", "fetchone", "(", ")", "if", "not", ...
What's the version of this database? Found in metadata attached by datacache when creating this database.
[ "What", "s", "the", "version", "of", "this", "database?", "Found", "in", "metadata", "attached", "by", "datacache", "when", "creating", "this", "database", "." ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/database.py#L85-L94
openvax/datacache
datacache/database.py
Database._finalize_database
def _finalize_database(self, version): """ Create metadata table for database with version number. Parameters ---------- version : int Tag created database with user-specified version number """ require_integer(version, "version") create_metadata_sql = \ "CREATE TABLE %s (version INT)" % METADATA_TABLE_NAME self.execute_sql(create_metadata_sql) insert_version_sql = \ "INSERT INTO %s VALUES (%s)" % (METADATA_TABLE_NAME, version) self.execute_sql(insert_version_sql)
python
def _finalize_database(self, version): """ Create metadata table for database with version number. Parameters ---------- version : int Tag created database with user-specified version number """ require_integer(version, "version") create_metadata_sql = \ "CREATE TABLE %s (version INT)" % METADATA_TABLE_NAME self.execute_sql(create_metadata_sql) insert_version_sql = \ "INSERT INTO %s VALUES (%s)" % (METADATA_TABLE_NAME, version) self.execute_sql(insert_version_sql)
[ "def", "_finalize_database", "(", "self", ",", "version", ")", ":", "require_integer", "(", "version", ",", "\"version\"", ")", "create_metadata_sql", "=", "\"CREATE TABLE %s (version INT)\"", "%", "METADATA_TABLE_NAME", "self", ".", "execute_sql", "(", "create_metadata...
Create metadata table for database with version number. Parameters ---------- version : int Tag created database with user-specified version number
[ "Create", "metadata", "table", "for", "database", "with", "version", "number", "." ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/database.py#L96-L111
openvax/datacache
datacache/database.py
Database._create_table
def _create_table(self, table_name, column_types, primary=None, nullable=()): """Creates a sqlite3 table from the given metadata. Parameters ---------- column_types : list of (str, str) pairs First element of each tuple is the column name, second element is the sqlite3 type primary : str, optional Which column is the primary key nullable : iterable, optional Names of columns which have null values """ require_string(table_name, "table name") require_iterable_of(column_types, tuple, name="rows") if primary is not None: require_string(primary, "primary") require_iterable_of(nullable, str, name="nullable") column_decls = [] for column_name, column_type in column_types: decl = "%s %s" % (column_name, column_type) if column_name == primary: decl += " UNIQUE PRIMARY KEY" if column_name not in nullable: decl += " NOT NULL" column_decls.append(decl) column_decl_str = ", ".join(column_decls) create_table_sql = \ "CREATE TABLE %s (%s)" % (table_name, column_decl_str) self.execute_sql(create_table_sql)
python
def _create_table(self, table_name, column_types, primary=None, nullable=()): """Creates a sqlite3 table from the given metadata. Parameters ---------- column_types : list of (str, str) pairs First element of each tuple is the column name, second element is the sqlite3 type primary : str, optional Which column is the primary key nullable : iterable, optional Names of columns which have null values """ require_string(table_name, "table name") require_iterable_of(column_types, tuple, name="rows") if primary is not None: require_string(primary, "primary") require_iterable_of(nullable, str, name="nullable") column_decls = [] for column_name, column_type in column_types: decl = "%s %s" % (column_name, column_type) if column_name == primary: decl += " UNIQUE PRIMARY KEY" if column_name not in nullable: decl += " NOT NULL" column_decls.append(decl) column_decl_str = ", ".join(column_decls) create_table_sql = \ "CREATE TABLE %s (%s)" % (table_name, column_decl_str) self.execute_sql(create_table_sql)
[ "def", "_create_table", "(", "self", ",", "table_name", ",", "column_types", ",", "primary", "=", "None", ",", "nullable", "=", "(", ")", ")", ":", "require_string", "(", "table_name", ",", "\"table name\"", ")", "require_iterable_of", "(", "column_types", ","...
Creates a sqlite3 table from the given metadata. Parameters ---------- column_types : list of (str, str) pairs First element of each tuple is the column name, second element is the sqlite3 type primary : str, optional Which column is the primary key nullable : iterable, optional Names of columns which have null values
[ "Creates", "a", "sqlite3", "table", "from", "the", "given", "metadata", "." ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/database.py#L113-L145
openvax/datacache
datacache/database.py
Database.create
def create(self, tables, version): """Do the actual work of creating the database, filling its tables with values, creating indices, and setting the datacache version metadata. Parameters ---------- tables : list List of datacache.DatabaseTable objects version : int """ for table in tables: self._create_table( table_name=table.name, column_types=table.column_types, primary=table.primary_key, nullable=table.nullable) self._fill_table(table.name, table.rows) self._create_indices(table.name, table.indices) self._finalize_database(version) self._commit()
python
def create(self, tables, version): """Do the actual work of creating the database, filling its tables with values, creating indices, and setting the datacache version metadata. Parameters ---------- tables : list List of datacache.DatabaseTable objects version : int """ for table in tables: self._create_table( table_name=table.name, column_types=table.column_types, primary=table.primary_key, nullable=table.nullable) self._fill_table(table.name, table.rows) self._create_indices(table.name, table.indices) self._finalize_database(version) self._commit()
[ "def", "create", "(", "self", ",", "tables", ",", "version", ")", ":", "for", "table", "in", "tables", ":", "self", ".", "_create_table", "(", "table_name", "=", "table", ".", "name", ",", "column_types", "=", "table", ".", "column_types", ",", "primary"...
Do the actual work of creating the database, filling its tables with values, creating indices, and setting the datacache version metadata. Parameters ---------- tables : list List of datacache.DatabaseTable objects version : int
[ "Do", "the", "actual", "work", "of", "creating", "the", "database", "filling", "its", "tables", "with", "values", "creating", "indices", "and", "setting", "the", "datacache", "version", "metadata", "." ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/database.py#L166-L186
openvax/datacache
datacache/database.py
Database._create_index
def _create_index(self, table_name, index_columns): """ Creates an index over multiple columns of a given table. Parameters ---------- table_name : str index_columns : iterable of str Which columns should be indexed """ logger.info( "Creating index on %s (%s)", table_name, ", ".join(index_columns)) index_name = "%s_index_%s" % ( table_name, "_".join(index_columns)) self.connection.execute( "CREATE INDEX IF NOT EXISTS %s ON %s (%s)" % ( index_name, table_name, ", ".join(index_columns)))
python
def _create_index(self, table_name, index_columns): """ Creates an index over multiple columns of a given table. Parameters ---------- table_name : str index_columns : iterable of str Which columns should be indexed """ logger.info( "Creating index on %s (%s)", table_name, ", ".join(index_columns)) index_name = "%s_index_%s" % ( table_name, "_".join(index_columns)) self.connection.execute( "CREATE INDEX IF NOT EXISTS %s ON %s (%s)" % ( index_name, table_name, ", ".join(index_columns)))
[ "def", "_create_index", "(", "self", ",", "table_name", ",", "index_columns", ")", ":", "logger", ".", "info", "(", "\"Creating index on %s (%s)\"", ",", "table_name", ",", "\", \"", ".", "join", "(", "index_columns", ")", ")", "index_name", "=", "\"%s_index_%s\...
Creates an index over multiple columns of a given table. Parameters ---------- table_name : str index_columns : iterable of str Which columns should be indexed
[ "Creates", "an", "index", "over", "multiple", "columns", "of", "a", "given", "table", "." ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/database.py#L188-L211
openvax/datacache
datacache/database.py
Database._create_indices
def _create_indices(self, table_name, indices): """ Create multiple indices (each over multiple columns) on a given table. Parameters ---------- table_name : str indices : iterable of tuples Multiple groups of columns, each of which should be indexed. """ require_string(table_name, "table_name") require_iterable_of(indices, (tuple, list)) for index_column_set in indices: self._create_index(table_name, index_column_set)
python
def _create_indices(self, table_name, indices): """ Create multiple indices (each over multiple columns) on a given table. Parameters ---------- table_name : str indices : iterable of tuples Multiple groups of columns, each of which should be indexed. """ require_string(table_name, "table_name") require_iterable_of(indices, (tuple, list)) for index_column_set in indices: self._create_index(table_name, index_column_set)
[ "def", "_create_indices", "(", "self", ",", "table_name", ",", "indices", ")", ":", "require_string", "(", "table_name", ",", "\"table_name\"", ")", "require_iterable_of", "(", "indices", ",", "(", "tuple", ",", "list", ")", ")", "for", "index_column_set", "in...
Create multiple indices (each over multiple columns) on a given table. Parameters ---------- table_name : str indices : iterable of tuples Multiple groups of columns, each of which should be indexed.
[ "Create", "multiple", "indices", "(", "each", "over", "multiple", "columns", ")", "on", "a", "given", "table", "." ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/database.py#L213-L227
twisted/mantissa
xmantissa/_webutil.py
WebViewerHelper.wrapModel
def wrapModel(self, model): """ Converts application-provided model objects to L{IResource} providers. """ res = IResource(model, None) if res is None: frag = INavigableFragment(model) fragmentName = getattr(frag, 'fragmentName', None) if fragmentName is not None: fragDocFactory = self._getDocFactory(fragmentName) if fragDocFactory is not None: frag.docFactory = fragDocFactory if frag.docFactory is None: raise CouldNotLoadFromThemes(frag, self._preferredThemes()) useAthena = isinstance(frag, (athena.LiveFragment, athena.LiveElement)) return self._wrapNavFrag(frag, useAthena) else: return res
python
def wrapModel(self, model): """ Converts application-provided model objects to L{IResource} providers. """ res = IResource(model, None) if res is None: frag = INavigableFragment(model) fragmentName = getattr(frag, 'fragmentName', None) if fragmentName is not None: fragDocFactory = self._getDocFactory(fragmentName) if fragDocFactory is not None: frag.docFactory = fragDocFactory if frag.docFactory is None: raise CouldNotLoadFromThemes(frag, self._preferredThemes()) useAthena = isinstance(frag, (athena.LiveFragment, athena.LiveElement)) return self._wrapNavFrag(frag, useAthena) else: return res
[ "def", "wrapModel", "(", "self", ",", "model", ")", ":", "res", "=", "IResource", "(", "model", ",", "None", ")", "if", "res", "is", "None", ":", "frag", "=", "INavigableFragment", "(", "model", ")", "fragmentName", "=", "getattr", "(", "frag", ",", ...
Converts application-provided model objects to L{IResource} providers.
[ "Converts", "application", "-", "provided", "model", "objects", "to", "L", "{", "IResource", "}", "providers", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/_webutil.py#L65-L82
twisted/mantissa
xmantissa/_webutil.py
MantissaViewHelper.locateChild
def locateChild(self, ctx, segments): """ Attempt to locate the child via the '.fragment' attribute, then fall back to normal locateChild behavior. """ if self.fragment is not None: # There are still a bunch of bogus subclasses of this class, which # are used in a variety of distasteful ways. 'fragment' *should* # always be set to something that isn't None, but there's no way to # make sure that it will be for the moment. Every effort should be # made to reduce public use of subclasses of this class (instead # preferring to wrap content objects with # IWebViewer.wrapModel()), so that the above check can be # removed. -glyph lc = getattr(self.fragment, 'locateChild', None) if lc is not None: x = lc(ctx, segments) if x is not NotFound: return x return super(MantissaViewHelper, self).locateChild(ctx, segments)
python
def locateChild(self, ctx, segments): """ Attempt to locate the child via the '.fragment' attribute, then fall back to normal locateChild behavior. """ if self.fragment is not None: # There are still a bunch of bogus subclasses of this class, which # are used in a variety of distasteful ways. 'fragment' *should* # always be set to something that isn't None, but there's no way to # make sure that it will be for the moment. Every effort should be # made to reduce public use of subclasses of this class (instead # preferring to wrap content objects with # IWebViewer.wrapModel()), so that the above check can be # removed. -glyph lc = getattr(self.fragment, 'locateChild', None) if lc is not None: x = lc(ctx, segments) if x is not NotFound: return x return super(MantissaViewHelper, self).locateChild(ctx, segments)
[ "def", "locateChild", "(", "self", ",", "ctx", ",", "segments", ")", ":", "if", "self", ".", "fragment", "is", "not", "None", ":", "# There are still a bunch of bogus subclasses of this class, which", "# are used in a variety of distasteful ways. 'fragment' *should*", "# alw...
Attempt to locate the child via the '.fragment' attribute, then fall back to normal locateChild behavior.
[ "Attempt", "to", "locate", "the", "child", "via", "the", ".", "fragment", "attribute", "then", "fall", "back", "to", "normal", "locateChild", "behavior", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/_webutil.py#L99-L118
twisted/mantissa
xmantissa/_webutil.py
SiteRootMixin.locateChild
def locateChild(self, context, segments): """ Return a statically defined child or a child defined by a site root plugin or an avatar from guard. """ request = IRequest(context) webViewer = IWebViewer(self.store, None) childAndSegments = self.siteProduceResource(request, segments, webViewer) if childAndSegments is not None: return childAndSegments return NotFound
python
def locateChild(self, context, segments): """ Return a statically defined child or a child defined by a site root plugin or an avatar from guard. """ request = IRequest(context) webViewer = IWebViewer(self.store, None) childAndSegments = self.siteProduceResource(request, segments, webViewer) if childAndSegments is not None: return childAndSegments return NotFound
[ "def", "locateChild", "(", "self", ",", "context", ",", "segments", ")", ":", "request", "=", "IRequest", "(", "context", ")", "webViewer", "=", "IWebViewer", "(", "self", ".", "store", ",", "None", ")", "childAndSegments", "=", "self", ".", "siteProduceRe...
Return a statically defined child or a child defined by a site root plugin or an avatar from guard.
[ "Return", "a", "statically", "defined", "child", "or", "a", "child", "defined", "by", "a", "site", "root", "plugin", "or", "an", "avatar", "from", "guard", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/_webutil.py#L127-L137
twisted/mantissa
xmantissa/_webutil.py
SiteRootMixin.siteProduceResource
def siteProduceResource(self, req, segments, webViewer): """ Retrieve a child resource and segments from rootChild_ methods on this object and SiteRootPlugins. @return: a 2-tuple of (resource, segments), suitable for return from locateChild. @param req: an L{IRequest} provider. @param segments: a tuple of L{str}s, the segments from the request. @param webViewer: an L{IWebViewer}, to be propagated through the child lookup process. """ # rootChild_* is not the same as child_, because its signature is # different. Maybe this should be done some other way. shortcut = getattr(self, 'rootChild_' + segments[0], None) if shortcut: res = shortcut(req, webViewer) if res is not None: return res, segments[1:] for plg in self.store.powerupsFor(ISiteRootPlugin): produceResource = getattr(plg, 'produceResource', None) if produceResource is not None: childAndSegments = produceResource(req, segments, webViewer) else: childAndSegments = plg.resourceFactory(segments) if childAndSegments is not None: return childAndSegments return None
python
def siteProduceResource(self, req, segments, webViewer): """ Retrieve a child resource and segments from rootChild_ methods on this object and SiteRootPlugins. @return: a 2-tuple of (resource, segments), suitable for return from locateChild. @param req: an L{IRequest} provider. @param segments: a tuple of L{str}s, the segments from the request. @param webViewer: an L{IWebViewer}, to be propagated through the child lookup process. """ # rootChild_* is not the same as child_, because its signature is # different. Maybe this should be done some other way. shortcut = getattr(self, 'rootChild_' + segments[0], None) if shortcut: res = shortcut(req, webViewer) if res is not None: return res, segments[1:] for plg in self.store.powerupsFor(ISiteRootPlugin): produceResource = getattr(plg, 'produceResource', None) if produceResource is not None: childAndSegments = produceResource(req, segments, webViewer) else: childAndSegments = plg.resourceFactory(segments) if childAndSegments is not None: return childAndSegments return None
[ "def", "siteProduceResource", "(", "self", ",", "req", ",", "segments", ",", "webViewer", ")", ":", "# rootChild_* is not the same as child_, because its signature is", "# different. Maybe this should be done some other way.", "shortcut", "=", "getattr", "(", "self", ",", "'...
Retrieve a child resource and segments from rootChild_ methods on this object and SiteRootPlugins. @return: a 2-tuple of (resource, segments), suitable for return from locateChild. @param req: an L{IRequest} provider. @param segments: a tuple of L{str}s, the segments from the request. @param webViewer: an L{IWebViewer}, to be propagated through the child lookup process.
[ "Retrieve", "a", "child", "resource", "and", "segments", "from", "rootChild_", "methods", "on", "this", "object", "and", "SiteRootPlugins", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/_webutil.py#L141-L173
twisted/mantissa
xmantissa/_webutil.py
SiteRootMixin.indirect
def indirect(self, interface): """ Create a L{VirtualHostWrapper} so it can have the first chance to handle web requests. """ if interface is IResource: siteStore = self.store.parent if self.store.parent is None: siteStore = self.store return VirtualHostWrapper( siteStore, IWebViewer(self.store), self) return self
python
def indirect(self, interface): """ Create a L{VirtualHostWrapper} so it can have the first chance to handle web requests. """ if interface is IResource: siteStore = self.store.parent if self.store.parent is None: siteStore = self.store return VirtualHostWrapper( siteStore, IWebViewer(self.store), self) return self
[ "def", "indirect", "(", "self", ",", "interface", ")", ":", "if", "interface", "is", "IResource", ":", "siteStore", "=", "self", ".", "store", ".", "parent", "if", "self", ".", "store", ".", "parent", "is", "None", ":", "siteStore", "=", "self", ".", ...
Create a L{VirtualHostWrapper} so it can have the first chance to handle web requests.
[ "Create", "a", "L", "{", "VirtualHostWrapper", "}", "so", "it", "can", "have", "the", "first", "chance", "to", "handle", "web", "requests", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/_webutil.py#L177-L190
twisted/mantissa
xmantissa/_webutil.py
VirtualHostWrapper.subdomain
def subdomain(self, hostname): """ Determine of which known domain the given hostname is a subdomain. @return: A two-tuple giving the subdomain part and the domain part or C{None} if the domain is not a subdomain of any known domain. """ hostname = hostname.split(":")[0] for domain in getDomainNames(self.siteStore): if hostname.endswith("." + domain): username = hostname[:-len(domain) - 1] if username != "www": return username, domain return None
python
def subdomain(self, hostname): """ Determine of which known domain the given hostname is a subdomain. @return: A two-tuple giving the subdomain part and the domain part or C{None} if the domain is not a subdomain of any known domain. """ hostname = hostname.split(":")[0] for domain in getDomainNames(self.siteStore): if hostname.endswith("." + domain): username = hostname[:-len(domain) - 1] if username != "www": return username, domain return None
[ "def", "subdomain", "(", "self", ",", "hostname", ")", ":", "hostname", "=", "hostname", ".", "split", "(", "\":\"", ")", "[", "0", "]", "for", "domain", "in", "getDomainNames", "(", "self", ".", "siteStore", ")", ":", "if", "hostname", ".", "endswith"...
Determine of which known domain the given hostname is a subdomain. @return: A two-tuple giving the subdomain part and the domain part or C{None} if the domain is not a subdomain of any known domain.
[ "Determine", "of", "which", "known", "domain", "the", "given", "hostname", "is", "a", "subdomain", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/_webutil.py#L214-L227
twisted/mantissa
xmantissa/_webutil.py
VirtualHostWrapper.locateChild
def locateChild(self, context, segments): """ Delegate dispatch to a sharing resource if the request is for a user subdomain, otherwise fall back to the wrapped resource's C{locateChild} implementation. """ request = IRequest(context) hostname = request.getHeader('host') info = self.subdomain(hostname) if info is not None: username, domain = info index = UserIndexPage(IRealm(self.siteStore), self.webViewer) resource = index.locateChild(None, [username])[0] return resource, segments return self.wrapped.locateChild(context, segments)
python
def locateChild(self, context, segments): """ Delegate dispatch to a sharing resource if the request is for a user subdomain, otherwise fall back to the wrapped resource's C{locateChild} implementation. """ request = IRequest(context) hostname = request.getHeader('host') info = self.subdomain(hostname) if info is not None: username, domain = info index = UserIndexPage(IRealm(self.siteStore), self.webViewer) resource = index.locateChild(None, [username])[0] return resource, segments return self.wrapped.locateChild(context, segments)
[ "def", "locateChild", "(", "self", ",", "context", ",", "segments", ")", ":", "request", "=", "IRequest", "(", "context", ")", "hostname", "=", "request", ".", "getHeader", "(", "'host'", ")", "info", "=", "self", ".", "subdomain", "(", "hostname", ")", ...
Delegate dispatch to a sharing resource if the request is for a user subdomain, otherwise fall back to the wrapped resource's C{locateChild} implementation.
[ "Delegate", "dispatch", "to", "a", "sharing", "resource", "if", "the", "request", "is", "for", "a", "user", "subdomain", "otherwise", "fall", "back", "to", "the", "wrapped", "resource", "s", "C", "{", "locateChild", "}", "implementation", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/_webutil.py#L230-L246
twisted/mantissa
xmantissa/scrolltable.py
_webTranslator
def _webTranslator(store, fallback): """ Discover a web translator based on an Axiom store and a specified default. Prefer the specified default. This is an implementation detail of various initializers in this module which require an L{IWebTranslator} provider. Some of those initializers did not previously require a webTranslator, so this function will issue a L{UserWarning} if no L{IWebTranslator} powerup exists for the given store and no fallback is provided. @param store: an L{axiom.store.Store} @param fallback: a provider of L{IWebTranslator}, or None @return: 'fallback', if it is provided, or the L{IWebTranslator} powerup on 'store'. """ if fallback is None: fallback = IWebTranslator(store, None) if fallback is None: warnings.warn( "No IWebTranslator plugin when creating Scrolltable - broken " "configuration, now deprecated! Try passing webTranslator " "keyword argument.", category=DeprecationWarning, stacklevel=4) return fallback
python
def _webTranslator(store, fallback): """ Discover a web translator based on an Axiom store and a specified default. Prefer the specified default. This is an implementation detail of various initializers in this module which require an L{IWebTranslator} provider. Some of those initializers did not previously require a webTranslator, so this function will issue a L{UserWarning} if no L{IWebTranslator} powerup exists for the given store and no fallback is provided. @param store: an L{axiom.store.Store} @param fallback: a provider of L{IWebTranslator}, or None @return: 'fallback', if it is provided, or the L{IWebTranslator} powerup on 'store'. """ if fallback is None: fallback = IWebTranslator(store, None) if fallback is None: warnings.warn( "No IWebTranslator plugin when creating Scrolltable - broken " "configuration, now deprecated! Try passing webTranslator " "keyword argument.", category=DeprecationWarning, stacklevel=4) return fallback
[ "def", "_webTranslator", "(", "store", ",", "fallback", ")", ":", "if", "fallback", "is", "None", ":", "fallback", "=", "IWebTranslator", "(", "store", ",", "None", ")", "if", "fallback", "is", "None", ":", "warnings", ".", "warn", "(", "\"No IWebTranslato...
Discover a web translator based on an Axiom store and a specified default. Prefer the specified default. This is an implementation detail of various initializers in this module which require an L{IWebTranslator} provider. Some of those initializers did not previously require a webTranslator, so this function will issue a L{UserWarning} if no L{IWebTranslator} powerup exists for the given store and no fallback is provided. @param store: an L{axiom.store.Store} @param fallback: a provider of L{IWebTranslator}, or None @return: 'fallback', if it is provided, or the L{IWebTranslator} powerup on 'store'.
[ "Discover", "a", "web", "translator", "based", "on", "an", "Axiom", "store", "and", "a", "specified", "default", ".", "Prefer", "the", "specified", "default", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L321-L346
twisted/mantissa
xmantissa/scrolltable.py
_ScrollableBase.resort
def resort(self, columnName): """ Re-sort the table. @param columnName: the name of the column to sort by. This is a string because it is passed from the browser. """ csc = self.currentSortColumn newSortColumn = self.columns[columnName] if newSortColumn is None: raise Unsortable('column %r has no sort attribute' % (columnName,)) if csc is newSortColumn: self.isAscending = not self.isAscending else: self.currentSortColumn = newSortColumn self.isAscending = True return self.isAscending
python
def resort(self, columnName): """ Re-sort the table. @param columnName: the name of the column to sort by. This is a string because it is passed from the browser. """ csc = self.currentSortColumn newSortColumn = self.columns[columnName] if newSortColumn is None: raise Unsortable('column %r has no sort attribute' % (columnName,)) if csc is newSortColumn: self.isAscending = not self.isAscending else: self.currentSortColumn = newSortColumn self.isAscending = True return self.isAscending
[ "def", "resort", "(", "self", ",", "columnName", ")", ":", "csc", "=", "self", ".", "currentSortColumn", "newSortColumn", "=", "self", ".", "columns", "[", "columnName", "]", "if", "newSortColumn", "is", "None", ":", "raise", "Unsortable", "(", "'column %r h...
Re-sort the table. @param columnName: the name of the column to sort by. This is a string because it is passed from the browser.
[ "Re", "-", "sort", "the", "table", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L281-L297
twisted/mantissa
xmantissa/scrolltable.py
InequalityModel.inequalityQuery
def inequalityQuery(self, constraint, count, isAscending): """ Perform a query to obtain some rows from the table represented by this model, at the behest of a networked client. @param constraint: an additional constraint to apply to the query. @type constraint: L{axiom.iaxiom.IComparison}. @param count: the maximum number of rows to return. @type count: C{int} @param isAscending: a boolean describing whether the query should be yielding ascending or descending results. @type isAscending: C{bool} @return: an query which will yield some results from this model. @rtype: L{axiom.iaxiom.IQuery} """ if self.baseConstraint is not None: if constraint is not None: constraint = AND(self.baseConstraint, constraint) else: constraint = self.baseConstraint # build the sort currentSortAttribute = self.currentSortColumn.sortAttribute() if isAscending: sort = (currentSortAttribute.ascending, self.itemType.storeID.ascending) else: sort = (currentSortAttribute.descending, self.itemType.storeID.descending) return self.store.query(self.itemType, constraint, sort=sort, limit=count).distinct()
python
def inequalityQuery(self, constraint, count, isAscending): """ Perform a query to obtain some rows from the table represented by this model, at the behest of a networked client. @param constraint: an additional constraint to apply to the query. @type constraint: L{axiom.iaxiom.IComparison}. @param count: the maximum number of rows to return. @type count: C{int} @param isAscending: a boolean describing whether the query should be yielding ascending or descending results. @type isAscending: C{bool} @return: an query which will yield some results from this model. @rtype: L{axiom.iaxiom.IQuery} """ if self.baseConstraint is not None: if constraint is not None: constraint = AND(self.baseConstraint, constraint) else: constraint = self.baseConstraint # build the sort currentSortAttribute = self.currentSortColumn.sortAttribute() if isAscending: sort = (currentSortAttribute.ascending, self.itemType.storeID.ascending) else: sort = (currentSortAttribute.descending, self.itemType.storeID.descending) return self.store.query(self.itemType, constraint, sort=sort, limit=count).distinct()
[ "def", "inequalityQuery", "(", "self", ",", "constraint", ",", "count", ",", "isAscending", ")", ":", "if", "self", ".", "baseConstraint", "is", "not", "None", ":", "if", "constraint", "is", "not", "None", ":", "constraint", "=", "AND", "(", "self", ".",...
Perform a query to obtain some rows from the table represented by this model, at the behest of a networked client. @param constraint: an additional constraint to apply to the query. @type constraint: L{axiom.iaxiom.IComparison}. @param count: the maximum number of rows to return. @type count: C{int} @param isAscending: a boolean describing whether the query should be yielding ascending or descending results. @type isAscending: C{bool} @return: an query which will yield some results from this model. @rtype: L{axiom.iaxiom.IQuery}
[ "Perform", "a", "query", "to", "obtain", "some", "rows", "from", "the", "table", "represented", "by", "this", "model", "at", "the", "behest", "of", "a", "networked", "client", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L409-L443
twisted/mantissa
xmantissa/scrolltable.py
InequalityModel.rowsAfterValue
def rowsAfterValue(self, value, count): """ Retrieve some rows at or after a given sort-column value. @param value: Starting value in the index for the current sort column at which to start returning results. Rows with a column value for the current sort column which is greater than or equal to this value will be returned. @type value: Some type compatible with the current sort column, or None, to specify the beginning of the data. @param count: The maximum number of rows to return. @type count: C{int} @return: A list of row data, ordered by the current sort column, beginning at C{value} and containing at most C{count} elements. """ if value is None: query = self.inequalityQuery(None, count, True) else: pyvalue = self._toComparableValue(value) currentSortAttribute = self.currentSortColumn.sortAttribute() query = self.inequalityQuery(currentSortAttribute >= pyvalue, count, True) return self.constructRows(query)
python
def rowsAfterValue(self, value, count): """ Retrieve some rows at or after a given sort-column value. @param value: Starting value in the index for the current sort column at which to start returning results. Rows with a column value for the current sort column which is greater than or equal to this value will be returned. @type value: Some type compatible with the current sort column, or None, to specify the beginning of the data. @param count: The maximum number of rows to return. @type count: C{int} @return: A list of row data, ordered by the current sort column, beginning at C{value} and containing at most C{count} elements. """ if value is None: query = self.inequalityQuery(None, count, True) else: pyvalue = self._toComparableValue(value) currentSortAttribute = self.currentSortColumn.sortAttribute() query = self.inequalityQuery(currentSortAttribute >= pyvalue, count, True) return self.constructRows(query)
[ "def", "rowsAfterValue", "(", "self", ",", "value", ",", "count", ")", ":", "if", "value", "is", "None", ":", "query", "=", "self", ".", "inequalityQuery", "(", "None", ",", "count", ",", "True", ")", "else", ":", "pyvalue", "=", "self", ".", "_toCom...
Retrieve some rows at or after a given sort-column value. @param value: Starting value in the index for the current sort column at which to start returning results. Rows with a column value for the current sort column which is greater than or equal to this value will be returned. @type value: Some type compatible with the current sort column, or None, to specify the beginning of the data. @param count: The maximum number of rows to return. @type count: C{int} @return: A list of row data, ordered by the current sort column, beginning at C{value} and containing at most C{count} elements.
[ "Retrieve", "some", "rows", "at", "or", "after", "a", "given", "sort", "-", "column", "value", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L446-L470
twisted/mantissa
xmantissa/scrolltable.py
InequalityModel.rowsAfterRow
def rowsAfterRow(self, rowObject, count): """ Wrapper around L{rowsAfterItem} which accepts the web ID for a item instead of the item itself. @param rowObject: a dictionary mapping strings to column values, sent from the client. One of those column values must be C{__id__} to uniquely identify a row. @param count: an integer, the number of rows to return. """ webID = rowObject['__id__'] return self.rowsAfterItem( self.webTranslator.fromWebID(webID), count)
python
def rowsAfterRow(self, rowObject, count): """ Wrapper around L{rowsAfterItem} which accepts the web ID for a item instead of the item itself. @param rowObject: a dictionary mapping strings to column values, sent from the client. One of those column values must be C{__id__} to uniquely identify a row. @param count: an integer, the number of rows to return. """ webID = rowObject['__id__'] return self.rowsAfterItem( self.webTranslator.fromWebID(webID), count)
[ "def", "rowsAfterRow", "(", "self", ",", "rowObject", ",", "count", ")", ":", "webID", "=", "rowObject", "[", "'__id__'", "]", "return", "self", ".", "rowsAfterItem", "(", "self", ".", "webTranslator", ".", "fromWebID", "(", "webID", ")", ",", "count", "...
Wrapper around L{rowsAfterItem} which accepts the web ID for a item instead of the item itself. @param rowObject: a dictionary mapping strings to column values, sent from the client. One of those column values must be C{__id__} to uniquely identify a row. @param count: an integer, the number of rows to return.
[ "Wrapper", "around", "L", "{", "rowsAfterItem", "}", "which", "accepts", "the", "web", "ID", "for", "a", "item", "instead", "of", "the", "item", "itself", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L503-L517
twisted/mantissa
xmantissa/scrolltable.py
InequalityModel.rowsBeforeRow
def rowsBeforeRow(self, rowObject, count): """ Wrapper around L{rowsBeforeItem} which accepts the web ID for a item instead of the item itself. @param rowObject: a dictionary mapping strings to column values, sent from the client. One of those column values must be C{__id__} to uniquely identify a row. @param count: an integer, the number of rows to return. """ webID = rowObject['__id__'] return self.rowsBeforeItem( self.webTranslator.fromWebID(webID), count)
python
def rowsBeforeRow(self, rowObject, count): """ Wrapper around L{rowsBeforeItem} which accepts the web ID for a item instead of the item itself. @param rowObject: a dictionary mapping strings to column values, sent from the client. One of those column values must be C{__id__} to uniquely identify a row. @param count: an integer, the number of rows to return. """ webID = rowObject['__id__'] return self.rowsBeforeItem( self.webTranslator.fromWebID(webID), count)
[ "def", "rowsBeforeRow", "(", "self", ",", "rowObject", ",", "count", ")", ":", "webID", "=", "rowObject", "[", "'__id__'", "]", "return", "self", ".", "rowsBeforeItem", "(", "self", ".", "webTranslator", ".", "fromWebID", "(", "webID", ")", ",", "count", ...
Wrapper around L{rowsBeforeItem} which accepts the web ID for a item instead of the item itself. @param rowObject: a dictionary mapping strings to column values, sent from the client. One of those column values must be C{__id__} to uniquely identify a row. @param count: an integer, the number of rows to return.
[ "Wrapper", "around", "L", "{", "rowsBeforeItem", "}", "which", "accepts", "the", "web", "ID", "for", "a", "item", "instead", "of", "the", "item", "itself", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L521-L535
twisted/mantissa
xmantissa/scrolltable.py
InequalityModel._toComparableValue
def _toComparableValue(self, value): """ Trivial wrapper which takes into account the possibility that our sort column might not have defined the C{toComparableValue} method. This can probably serve as a good generic template for some infrastructure to deal with arbitrarily-potentially-missing methods from certain versions of interfaces, but we didn't take it any further than it needed to go for this system's fairly meagre requirements. *Please* feel free to refactor upwards as necessary. """ if hasattr(self.currentSortColumn, 'toComparableValue'): return self.currentSortColumn.toComparableValue(value) # Retrieve the location of the class's definition so that we can alert # the user as to where they need to insert their implementation. classDef = self.currentSortColumn.__class__ filename = inspect.getsourcefile(classDef) lineno = inspect.findsource(classDef)[1] warnings.warn_explicit( "IColumn implementor " + qual(self.currentSortColumn.__class__) + " " "does not implement method toComparableValue. This is required since " "Mantissa 0.6.6.", DeprecationWarning, filename, lineno) return value
python
def _toComparableValue(self, value): """ Trivial wrapper which takes into account the possibility that our sort column might not have defined the C{toComparableValue} method. This can probably serve as a good generic template for some infrastructure to deal with arbitrarily-potentially-missing methods from certain versions of interfaces, but we didn't take it any further than it needed to go for this system's fairly meagre requirements. *Please* feel free to refactor upwards as necessary. """ if hasattr(self.currentSortColumn, 'toComparableValue'): return self.currentSortColumn.toComparableValue(value) # Retrieve the location of the class's definition so that we can alert # the user as to where they need to insert their implementation. classDef = self.currentSortColumn.__class__ filename = inspect.getsourcefile(classDef) lineno = inspect.findsource(classDef)[1] warnings.warn_explicit( "IColumn implementor " + qual(self.currentSortColumn.__class__) + " " "does not implement method toComparableValue. This is required since " "Mantissa 0.6.6.", DeprecationWarning, filename, lineno) return value
[ "def", "_toComparableValue", "(", "self", ",", "value", ")", ":", "if", "hasattr", "(", "self", ".", "currentSortColumn", ",", "'toComparableValue'", ")", ":", "return", "self", ".", "currentSortColumn", ".", "toComparableValue", "(", "value", ")", "# Retrieve t...
Trivial wrapper which takes into account the possibility that our sort column might not have defined the C{toComparableValue} method. This can probably serve as a good generic template for some infrastructure to deal with arbitrarily-potentially-missing methods from certain versions of interfaces, but we didn't take it any further than it needed to go for this system's fairly meagre requirements. *Please* feel free to refactor upwards as necessary.
[ "Trivial", "wrapper", "which", "takes", "into", "account", "the", "possibility", "that", "our", "sort", "column", "might", "not", "have", "defined", "the", "C", "{", "toComparableValue", "}", "method", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L539-L562
twisted/mantissa
xmantissa/scrolltable.py
InequalityModel.rowsBeforeValue
def rowsBeforeValue(self, value, count): """ Retrieve display data for rows with sort-column values less than the given value. @type value: Some type compatible with the current sort column. @param value: Starting value in the index for the current sort column at which to start returning results. Rows with a column value for the current sort column which is less than this value will be returned. @type count: C{int} @param count: The number of rows to return. @return: A list of row data, ordered by the current sort column, ending at C{value} and containing at most C{count} elements. """ if value is None: query = self.inequalityQuery(None, count, False) else: pyvalue = self._toComparableValue(value) currentSortAttribute = self.currentSortColumn.sortAttribute() query = self.inequalityQuery( currentSortAttribute < pyvalue, count, False) return self.constructRows(query)[::-1]
python
def rowsBeforeValue(self, value, count): """ Retrieve display data for rows with sort-column values less than the given value. @type value: Some type compatible with the current sort column. @param value: Starting value in the index for the current sort column at which to start returning results. Rows with a column value for the current sort column which is less than this value will be returned. @type count: C{int} @param count: The number of rows to return. @return: A list of row data, ordered by the current sort column, ending at C{value} and containing at most C{count} elements. """ if value is None: query = self.inequalityQuery(None, count, False) else: pyvalue = self._toComparableValue(value) currentSortAttribute = self.currentSortColumn.sortAttribute() query = self.inequalityQuery( currentSortAttribute < pyvalue, count, False) return self.constructRows(query)[::-1]
[ "def", "rowsBeforeValue", "(", "self", ",", "value", ",", "count", ")", ":", "if", "value", "is", "None", ":", "query", "=", "self", ".", "inequalityQuery", "(", "None", ",", "count", ",", "False", ")", "else", ":", "pyvalue", "=", "self", ".", "_toC...
Retrieve display data for rows with sort-column values less than the given value. @type value: Some type compatible with the current sort column. @param value: Starting value in the index for the current sort column at which to start returning results. Rows with a column value for the current sort column which is less than this value will be returned. @type count: C{int} @param count: The number of rows to return. @return: A list of row data, ordered by the current sort column, ending at C{value} and containing at most C{count} elements.
[ "Retrieve", "display", "data", "for", "rows", "with", "sort", "-", "column", "values", "less", "than", "the", "given", "value", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L565-L588
twisted/mantissa
xmantissa/scrolltable.py
InequalityModel.rowsBeforeItem
def rowsBeforeItem(self, item, count): """ The inverse of rowsAfterItem. @param item: then L{Item} to request rows before. @type item: this L{InequalityModel}'s L{itemType} attribute. @param count: The maximum number of rows to return. @type count: L{int} @return: A list of row data, ordered by the current sort column, beginning immediately after C{item}. """ currentSortAttribute = self.currentSortColumn.sortAttribute() value = currentSortAttribute.__get__(item, type(item)) firstQuery = self.inequalityQuery( AND(currentSortAttribute == value, self.itemType.storeID < item.storeID), count, False) results = self.constructRows(firstQuery) count -= len(results) if count: secondQuery = self.inequalityQuery(currentSortAttribute < value, count, False) results.extend(self.constructRows(secondQuery)) return results[::-1]
python
def rowsBeforeItem(self, item, count): """ The inverse of rowsAfterItem. @param item: then L{Item} to request rows before. @type item: this L{InequalityModel}'s L{itemType} attribute. @param count: The maximum number of rows to return. @type count: L{int} @return: A list of row data, ordered by the current sort column, beginning immediately after C{item}. """ currentSortAttribute = self.currentSortColumn.sortAttribute() value = currentSortAttribute.__get__(item, type(item)) firstQuery = self.inequalityQuery( AND(currentSortAttribute == value, self.itemType.storeID < item.storeID), count, False) results = self.constructRows(firstQuery) count -= len(results) if count: secondQuery = self.inequalityQuery(currentSortAttribute < value, count, False) results.extend(self.constructRows(secondQuery)) return results[::-1]
[ "def", "rowsBeforeItem", "(", "self", ",", "item", ",", "count", ")", ":", "currentSortAttribute", "=", "self", ".", "currentSortColumn", ".", "sortAttribute", "(", ")", "value", "=", "currentSortAttribute", ".", "__get__", "(", "item", ",", "type", "(", "it...
The inverse of rowsAfterItem. @param item: then L{Item} to request rows before. @type item: this L{InequalityModel}'s L{itemType} attribute. @param count: The maximum number of rows to return. @type count: L{int} @return: A list of row data, ordered by the current sort column, beginning immediately after C{item}.
[ "The", "inverse", "of", "rowsAfterItem", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L592-L617
twisted/mantissa
xmantissa/scrolltable.py
IndexingModel.requestRowRange
def requestRowRange(self, rangeBegin, rangeEnd): """ Retrieve display data for the given range of rows. @type rangeBegin: C{int} @param rangeBegin: The index of the first row to retrieve. @type rangeEnd: C{int} @param rangeEnd: The index of the last row to retrieve. @return: A C{list} of C{dict}s giving row data. """ return self.constructRows(self.performQuery(rangeBegin, rangeEnd))
python
def requestRowRange(self, rangeBegin, rangeEnd): """ Retrieve display data for the given range of rows. @type rangeBegin: C{int} @param rangeBegin: The index of the first row to retrieve. @type rangeEnd: C{int} @param rangeEnd: The index of the last row to retrieve. @return: A C{list} of C{dict}s giving row data. """ return self.constructRows(self.performQuery(rangeBegin, rangeEnd))
[ "def", "requestRowRange", "(", "self", ",", "rangeBegin", ",", "rangeEnd", ")", ":", "return", "self", ".", "constructRows", "(", "self", ".", "performQuery", "(", "rangeBegin", ",", "rangeEnd", ")", ")" ]
Retrieve display data for the given range of rows. @type rangeBegin: C{int} @param rangeBegin: The index of the first row to retrieve. @type rangeEnd: C{int} @param rangeEnd: The index of the last row to retrieve. @return: A C{list} of C{dict}s giving row data.
[ "Retrieve", "display", "data", "for", "the", "given", "range", "of", "rows", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L639-L651
twisted/mantissa
xmantissa/scrolltable.py
IndexingModel.getTableMetadata
def getTableMetadata(self): """ Retrieve a description of the various properties of this scrolltable. @return: A sequence containing 5 elements. They are, in order, a list of the names of the columns present, a mapping of column names to two-tuples of their type and a boolean indicating their sortability, the total number of rows in the scrolltable, the name of the default sort column, and a boolean indicating whether or not the current sort order is ascending. """ coltypes = {} for (colname, column) in self.columns.iteritems(): sortable = column.sortAttribute() is not None coltype = column.getType() if coltype is not None: coltype = unicode(coltype, 'ascii') coltypes[colname] = (coltype, sortable) if self.currentSortColumn: csc = unicode(self.currentSortColumn.sortAttribute().attrname, 'ascii') else: csc = None return [self.columnNames, coltypes, self.requestCurrentSize(), csc, self.isAscending]
python
def getTableMetadata(self): """ Retrieve a description of the various properties of this scrolltable. @return: A sequence containing 5 elements. They are, in order, a list of the names of the columns present, a mapping of column names to two-tuples of their type and a boolean indicating their sortability, the total number of rows in the scrolltable, the name of the default sort column, and a boolean indicating whether or not the current sort order is ascending. """ coltypes = {} for (colname, column) in self.columns.iteritems(): sortable = column.sortAttribute() is not None coltype = column.getType() if coltype is not None: coltype = unicode(coltype, 'ascii') coltypes[colname] = (coltype, sortable) if self.currentSortColumn: csc = unicode(self.currentSortColumn.sortAttribute().attrname, 'ascii') else: csc = None return [self.columnNames, coltypes, self.requestCurrentSize(), csc, self.isAscending]
[ "def", "getTableMetadata", "(", "self", ")", ":", "coltypes", "=", "{", "}", "for", "(", "colname", ",", "column", ")", "in", "self", ".", "columns", ".", "iteritems", "(", ")", ":", "sortable", "=", "column", ".", "sortAttribute", "(", ")", "is", "n...
Retrieve a description of the various properties of this scrolltable. @return: A sequence containing 5 elements. They are, in order, a list of the names of the columns present, a mapping of column names to two-tuples of their type and a boolean indicating their sortability, the total number of rows in the scrolltable, the name of the default sort column, and a boolean indicating whether or not the current sort order is ascending.
[ "Retrieve", "a", "description", "of", "the", "various", "properties", "of", "this", "scrolltable", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L656-L681
twisted/mantissa
xmantissa/scrolltable.py
ScrollableView.constructRows
def constructRows(self, items): """ Build row objects that are serializable using Athena for sending to the client. @param items: an iterable of objects compatible with my columns' C{extractValue} methods. @return: a list of dictionaries, where each dictionary has a string key for each column name in my list of columns. """ rows = [] for item in items: row = dict((colname, col.extractValue(self, item)) for (colname, col) in self.columns.iteritems()) link = self.linkToItem(item) if link is not None: row[u'__id__'] = link rows.append(row) return rows
python
def constructRows(self, items): """ Build row objects that are serializable using Athena for sending to the client. @param items: an iterable of objects compatible with my columns' C{extractValue} methods. @return: a list of dictionaries, where each dictionary has a string key for each column name in my list of columns. """ rows = [] for item in items: row = dict((colname, col.extractValue(self, item)) for (colname, col) in self.columns.iteritems()) link = self.linkToItem(item) if link is not None: row[u'__id__'] = link rows.append(row) return rows
[ "def", "constructRows", "(", "self", ",", "items", ")", ":", "rows", "=", "[", "]", "for", "item", "in", "items", ":", "row", "=", "dict", "(", "(", "colname", ",", "col", ".", "extractValue", "(", "self", ",", "item", ")", ")", "for", "(", "coln...
Build row objects that are serializable using Athena for sending to the client. @param items: an iterable of objects compatible with my columns' C{extractValue} methods. @return: a list of dictionaries, where each dictionary has a string key for each column name in my list of columns.
[ "Build", "row", "objects", "that", "are", "serializable", "using", "Athena", "for", "sending", "to", "the", "client", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L734-L754
twisted/mantissa
xmantissa/scrolltable.py
ScrollingElement._getColumnList
def _getColumnList(self): """ Get a list of serializable objects that describe the interesting columns on our item type. Columns which report having no type will be treated as having the type I{text}. @rtype: C{list} of C{dict} """ columnList = [] for columnName in self.columnNames: column = self.columns[columnName] type = column.getType() if type is None: type = 'text' columnList.append( {u'name': columnName, u'type': type.decode('ascii')}) return columnList
python
def _getColumnList(self): """ Get a list of serializable objects that describe the interesting columns on our item type. Columns which report having no type will be treated as having the type I{text}. @rtype: C{list} of C{dict} """ columnList = [] for columnName in self.columnNames: column = self.columns[columnName] type = column.getType() if type is None: type = 'text' columnList.append( {u'name': columnName, u'type': type.decode('ascii')}) return columnList
[ "def", "_getColumnList", "(", "self", ")", ":", "columnList", "=", "[", "]", "for", "columnName", "in", "self", ".", "columnNames", ":", "column", "=", "self", ".", "columns", "[", "columnName", "]", "type", "=", "column", ".", "getType", "(", ")", "if...
Get a list of serializable objects that describe the interesting columns on our item type. Columns which report having no type will be treated as having the type I{text}. @rtype: C{list} of C{dict}
[ "Get", "a", "list", "of", "serializable", "objects", "that", "describe", "the", "interesting", "columns", "on", "our", "item", "type", ".", "Columns", "which", "report", "having", "no", "type", "will", "be", "treated", "as", "having", "the", "type", "I", "...
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L923-L940
twisted/mantissa
xmantissa/scrolltable.py
ScrollingElement.getInitialArguments
def getInitialArguments(self): """ Return the constructor arguments required for the JavaScript client class, Mantissa.ScrollTable.ScrollTable. @return: a 3-tuple of:: - The unicode attribute ID of my current sort column - A list of dictionaries with 'name' and 'type' keys which are strings describing the name and type of all the columns in this table. - A bool indicating whether the sort direction is initially ascending. """ ic = IColumn(self.currentSortColumn) return [ic.attributeID.decode('ascii'), self._getColumnList(), self.isAscending]
python
def getInitialArguments(self): """ Return the constructor arguments required for the JavaScript client class, Mantissa.ScrollTable.ScrollTable. @return: a 3-tuple of:: - The unicode attribute ID of my current sort column - A list of dictionaries with 'name' and 'type' keys which are strings describing the name and type of all the columns in this table. - A bool indicating whether the sort direction is initially ascending. """ ic = IColumn(self.currentSortColumn) return [ic.attributeID.decode('ascii'), self._getColumnList(), self.isAscending]
[ "def", "getInitialArguments", "(", "self", ")", ":", "ic", "=", "IColumn", "(", "self", ".", "currentSortColumn", ")", "return", "[", "ic", ".", "attributeID", ".", "decode", "(", "'ascii'", ")", ",", "self", ".", "_getColumnList", "(", ")", ",", "self",...
Return the constructor arguments required for the JavaScript client class, Mantissa.ScrollTable.ScrollTable. @return: a 3-tuple of:: - The unicode attribute ID of my current sort column - A list of dictionaries with 'name' and 'type' keys which are strings describing the name and type of all the columns in this table. - A bool indicating whether the sort direction is initially ascending.
[ "Return", "the", "constructor", "arguments", "required", "for", "the", "JavaScript", "client", "class", "Mantissa", ".", "ScrollTable", ".", "ScrollTable", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/scrolltable.py#L943-L960
KeplerGO/K2fov
K2fov/K2findCampaigns.py
printChannelColRow
def printChannelColRow(campaign, ra, dec): """Prints the channel, col, row for a given campaign and coordinate.""" fovobj = fields.getKeplerFov(campaign) ch, col, row = fovobj.getChannelColRow(ra, dec) print("Position in C{}: channel {}, col {:.0f}, row {:.0f}.".format(campaign, int(ch), col, row))
python
def printChannelColRow(campaign, ra, dec): """Prints the channel, col, row for a given campaign and coordinate.""" fovobj = fields.getKeplerFov(campaign) ch, col, row = fovobj.getChannelColRow(ra, dec) print("Position in C{}: channel {}, col {:.0f}, row {:.0f}.".format(campaign, int(ch), col, row))
[ "def", "printChannelColRow", "(", "campaign", ",", "ra", ",", "dec", ")", ":", "fovobj", "=", "fields", ".", "getKeplerFov", "(", "campaign", ")", "ch", ",", "col", ",", "row", "=", "fovobj", ".", "getChannelColRow", "(", "ra", ",", "dec", ")", "print"...
Prints the channel, col, row for a given campaign and coordinate.
[ "Prints", "the", "channel", "col", "row", "for", "a", "given", "campaign", "and", "coordinate", "." ]
train
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/K2findCampaigns.py#L17-L21
KeplerGO/K2fov
K2fov/K2findCampaigns.py
findCampaigns
def findCampaigns(ra, dec): """Returns a list of the campaigns that cover a given position. Parameters ---------- ra, dec : float, float Position in decimal degrees (J2000). Returns ------- campaigns : list of int A list of the campaigns that cover the given position. """ # Temporary disable the logger to avoid the preliminary field warnings logger.disabled = True campaigns_visible = [] for c in fields.getFieldNumbers(): fovobj = fields.getKeplerFov(c) if onSiliconCheck(ra, dec, fovobj): campaigns_visible.append(c) # Re-enable the logger logger.disabled = True return campaigns_visible
python
def findCampaigns(ra, dec): """Returns a list of the campaigns that cover a given position. Parameters ---------- ra, dec : float, float Position in decimal degrees (J2000). Returns ------- campaigns : list of int A list of the campaigns that cover the given position. """ # Temporary disable the logger to avoid the preliminary field warnings logger.disabled = True campaigns_visible = [] for c in fields.getFieldNumbers(): fovobj = fields.getKeplerFov(c) if onSiliconCheck(ra, dec, fovobj): campaigns_visible.append(c) # Re-enable the logger logger.disabled = True return campaigns_visible
[ "def", "findCampaigns", "(", "ra", ",", "dec", ")", ":", "# Temporary disable the logger to avoid the preliminary field warnings", "logger", ".", "disabled", "=", "True", "campaigns_visible", "=", "[", "]", "for", "c", "in", "fields", ".", "getFieldNumbers", "(", ")...
Returns a list of the campaigns that cover a given position. Parameters ---------- ra, dec : float, float Position in decimal degrees (J2000). Returns ------- campaigns : list of int A list of the campaigns that cover the given position.
[ "Returns", "a", "list", "of", "the", "campaigns", "that", "cover", "a", "given", "position", "." ]
train
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/K2findCampaigns.py#L24-L46
KeplerGO/K2fov
K2fov/K2findCampaigns.py
findCampaignsByName
def findCampaignsByName(target): """Returns a list of the campaigns that cover a given target. Parameters ---------- target : str Name of the celestial object. Returns ------- campaigns : list of int A list of the campaigns that cover the given target name. ra, dec : float, float Resolved coordinates in decimal degrees (J2000). Exceptions ---------- Raises an ImportError if AstroPy is not installed. Raises a ValueError if `name` cannot be resolved to coordinates. """ # Is AstroPy (optional dependency) installed? try: from astropy.coordinates import SkyCoord from astropy.coordinates.name_resolve import NameResolveError from astropy.utils.data import conf conf.remote_timeout = 90 except ImportError: print('Error: AstroPy needs to be installed for this feature.') sys.exit(1) # Translate the target name into celestial coordinates try: crd = SkyCoord.from_name(target) except NameResolveError: raise ValueError('Could not find coordinates ' 'for target "{0}".'.format(target)) # Find the campaigns with visibility return findCampaigns(crd.ra.deg, crd.dec.deg), crd.ra.deg, crd.dec.deg
python
def findCampaignsByName(target): """Returns a list of the campaigns that cover a given target. Parameters ---------- target : str Name of the celestial object. Returns ------- campaigns : list of int A list of the campaigns that cover the given target name. ra, dec : float, float Resolved coordinates in decimal degrees (J2000). Exceptions ---------- Raises an ImportError if AstroPy is not installed. Raises a ValueError if `name` cannot be resolved to coordinates. """ # Is AstroPy (optional dependency) installed? try: from astropy.coordinates import SkyCoord from astropy.coordinates.name_resolve import NameResolveError from astropy.utils.data import conf conf.remote_timeout = 90 except ImportError: print('Error: AstroPy needs to be installed for this feature.') sys.exit(1) # Translate the target name into celestial coordinates try: crd = SkyCoord.from_name(target) except NameResolveError: raise ValueError('Could not find coordinates ' 'for target "{0}".'.format(target)) # Find the campaigns with visibility return findCampaigns(crd.ra.deg, crd.dec.deg), crd.ra.deg, crd.dec.deg
[ "def", "findCampaignsByName", "(", "target", ")", ":", "# Is AstroPy (optional dependency) installed?", "try", ":", "from", "astropy", ".", "coordinates", "import", "SkyCoord", "from", "astropy", ".", "coordinates", ".", "name_resolve", "import", "NameResolveError", "fr...
Returns a list of the campaigns that cover a given target. Parameters ---------- target : str Name of the celestial object. Returns ------- campaigns : list of int A list of the campaigns that cover the given target name. ra, dec : float, float Resolved coordinates in decimal degrees (J2000). Exceptions ---------- Raises an ImportError if AstroPy is not installed. Raises a ValueError if `name` cannot be resolved to coordinates.
[ "Returns", "a", "list", "of", "the", "campaigns", "that", "cover", "a", "given", "target", "." ]
train
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/K2findCampaigns.py#L49-L86
KeplerGO/K2fov
K2fov/K2findCampaigns.py
K2findCampaigns_main
def K2findCampaigns_main(args=None): """Exposes K2findCampaigns to the command line.""" parser = argparse.ArgumentParser( description="Check if a celestial coordinate is " "(or was) observable by any past or future " "observing campaign of NASA's K2 mission.") parser.add_argument('ra', nargs=1, type=float, help="Right Ascension in decimal degrees (J2000).") parser.add_argument('dec', nargs=1, type=float, help="Declination in decimal degrees (J2000).") parser.add_argument('-p', '--plot', action='store_true', help="Produce a plot showing the target position " "with respect to all K2 campaigns.") args = parser.parse_args(args) ra, dec = args.ra[0], args.dec[0] campaigns = findCampaigns(ra, dec) # Print the result if len(campaigns): print(Highlight.GREEN + "Success! The target is on silicon " "during K2 campaigns {0}.".format(campaigns) + Highlight.END) else: print(Highlight.RED + "Sorry, the target is not on silicon " "during any K2 campaign." + Highlight.END) # Print the pixel positions for c in campaigns: printChannelColRow(c, ra, dec) # Make a context plot if the user requested so if args.plot: save_context_plots(ra, dec, "Your object")
python
def K2findCampaigns_main(args=None): """Exposes K2findCampaigns to the command line.""" parser = argparse.ArgumentParser( description="Check if a celestial coordinate is " "(or was) observable by any past or future " "observing campaign of NASA's K2 mission.") parser.add_argument('ra', nargs=1, type=float, help="Right Ascension in decimal degrees (J2000).") parser.add_argument('dec', nargs=1, type=float, help="Declination in decimal degrees (J2000).") parser.add_argument('-p', '--plot', action='store_true', help="Produce a plot showing the target position " "with respect to all K2 campaigns.") args = parser.parse_args(args) ra, dec = args.ra[0], args.dec[0] campaigns = findCampaigns(ra, dec) # Print the result if len(campaigns): print(Highlight.GREEN + "Success! The target is on silicon " "during K2 campaigns {0}.".format(campaigns) + Highlight.END) else: print(Highlight.RED + "Sorry, the target is not on silicon " "during any K2 campaign." + Highlight.END) # Print the pixel positions for c in campaigns: printChannelColRow(c, ra, dec) # Make a context plot if the user requested so if args.plot: save_context_plots(ra, dec, "Your object")
[ "def", "K2findCampaigns_main", "(", "args", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "\"Check if a celestial coordinate is \"", "\"(or was) observable by any past or future \"", "\"observing campaign of NASA's K2 mission.\"...
Exposes K2findCampaigns to the command line.
[ "Exposes", "K2findCampaigns", "to", "the", "command", "line", "." ]
train
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/K2findCampaigns.py#L102-L130
KeplerGO/K2fov
K2fov/K2findCampaigns.py
K2findCampaigns_byname_main
def K2findCampaigns_byname_main(args=None): """Exposes K2findCampaigns to the command line.""" parser = argparse.ArgumentParser( description="Check if a target is " "(or was) observable by any past or future " "observing campaign of NASA's K2 mission.") parser.add_argument('name', nargs=1, type=str, help="Name of the object. This will be passed on " "to the CDS name resolver " "to retrieve coordinate information.") parser.add_argument('-p', '--plot', action='store_true', help="Produce a plot showing the target position " "with respect to all K2 campaigns.") args = parser.parse_args(args) targetname = args.name[0] try: campaigns, ra, dec = findCampaignsByName(targetname) except ValueError: print("Error: could not retrieve coordinates for {0}.".format(targetname)) print("The target may be unknown or there may be a problem " "connecting to the coordinate server.") sys.exit(1) # Print the result if len(campaigns): print(Highlight.GREEN + "Success! {0} is on silicon ".format(targetname) + "during K2 campaigns {0}.".format(campaigns) + Highlight.END) else: print(Highlight.RED + "Sorry, {} is not on silicon " "during any K2 campaign.".format(targetname) + Highlight.END) # Print the pixel positions for c in campaigns: printChannelColRow(c, ra, dec) # Make a context plot if the user requested so if args.plot: save_context_plots(ra, dec, targetname=targetname)
python
def K2findCampaigns_byname_main(args=None): """Exposes K2findCampaigns to the command line.""" parser = argparse.ArgumentParser( description="Check if a target is " "(or was) observable by any past or future " "observing campaign of NASA's K2 mission.") parser.add_argument('name', nargs=1, type=str, help="Name of the object. This will be passed on " "to the CDS name resolver " "to retrieve coordinate information.") parser.add_argument('-p', '--plot', action='store_true', help="Produce a plot showing the target position " "with respect to all K2 campaigns.") args = parser.parse_args(args) targetname = args.name[0] try: campaigns, ra, dec = findCampaignsByName(targetname) except ValueError: print("Error: could not retrieve coordinates for {0}.".format(targetname)) print("The target may be unknown or there may be a problem " "connecting to the coordinate server.") sys.exit(1) # Print the result if len(campaigns): print(Highlight.GREEN + "Success! {0} is on silicon ".format(targetname) + "during K2 campaigns {0}.".format(campaigns) + Highlight.END) else: print(Highlight.RED + "Sorry, {} is not on silicon " "during any K2 campaign.".format(targetname) + Highlight.END) # Print the pixel positions for c in campaigns: printChannelColRow(c, ra, dec) # Make a context plot if the user requested so if args.plot: save_context_plots(ra, dec, targetname=targetname)
[ "def", "K2findCampaigns_byname_main", "(", "args", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "\"Check if a target is \"", "\"(or was) observable by any past or future \"", "\"observing campaign of NASA's K2 mission.\"", ")...
Exposes K2findCampaigns to the command line.
[ "Exposes", "K2findCampaigns", "to", "the", "command", "line", "." ]
train
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/K2findCampaigns.py#L133-L169
KeplerGO/K2fov
K2fov/K2findCampaigns.py
K2findCampaigns_csv_main
def K2findCampaigns_csv_main(args=None): """Exposes K2findCampaigns-csv to the command line.""" parser = argparse.ArgumentParser( description="Check which objects listed in a CSV table " "are (or were) observable by NASA's K2 mission.") parser.add_argument('input_filename', nargs=1, type=str, help="Path to a comma-separated table containing " "columns 'ra,dec,kepmag' (decimal degrees) " "or 'name'.") args = parser.parse_args(args) input_fn = args.input_filename[0] output_fn = input_fn + '-K2findCampaigns.csv' # First, try assuming the file has the classic "ra,dec,kepmag" format try: ra, dec, kepmag = parse_file(input_fn, exit_on_error=False) campaigns = np.array([findCampaigns(ra[idx], dec[idx]) for idx in range(len(ra))]) output = np.array([ra, dec, kepmag, campaigns]) print("Writing {0}".format(output_fn)) np.savetxt(output_fn, output.T, delimiter=', ', fmt=['%10.10f', '%10.10f', '%10.2f', '%s']) # If this fails, assume the file has a single "name" column except ValueError: names = [name.strip() for name in open(input_fn, "r").readlines() if len(name.strip()) > 0] print("Writing {0}".format(output_fn)) output = open(output_fn, "w") for target in names: try: campaigns, ra, dec = findCampaignsByName(target) except ValueError: campaigns = [] output.write("{0}, {1}\n".format(target, campaigns)) output.flush() output.close()
python
def K2findCampaigns_csv_main(args=None): """Exposes K2findCampaigns-csv to the command line.""" parser = argparse.ArgumentParser( description="Check which objects listed in a CSV table " "are (or were) observable by NASA's K2 mission.") parser.add_argument('input_filename', nargs=1, type=str, help="Path to a comma-separated table containing " "columns 'ra,dec,kepmag' (decimal degrees) " "or 'name'.") args = parser.parse_args(args) input_fn = args.input_filename[0] output_fn = input_fn + '-K2findCampaigns.csv' # First, try assuming the file has the classic "ra,dec,kepmag" format try: ra, dec, kepmag = parse_file(input_fn, exit_on_error=False) campaigns = np.array([findCampaigns(ra[idx], dec[idx]) for idx in range(len(ra))]) output = np.array([ra, dec, kepmag, campaigns]) print("Writing {0}".format(output_fn)) np.savetxt(output_fn, output.T, delimiter=', ', fmt=['%10.10f', '%10.10f', '%10.2f', '%s']) # If this fails, assume the file has a single "name" column except ValueError: names = [name.strip() for name in open(input_fn, "r").readlines() if len(name.strip()) > 0] print("Writing {0}".format(output_fn)) output = open(output_fn, "w") for target in names: try: campaigns, ra, dec = findCampaignsByName(target) except ValueError: campaigns = [] output.write("{0}, {1}\n".format(target, campaigns)) output.flush() output.close()
[ "def", "K2findCampaigns_csv_main", "(", "args", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "\"Check which objects listed in a CSV table \"", "\"are (or were) observable by NASA's K2 mission.\"", ")", "parser", ".", "add...
Exposes K2findCampaigns-csv to the command line.
[ "Exposes", "K2findCampaigns", "-", "csv", "to", "the", "command", "line", "." ]
train
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/K2findCampaigns.py#L172-L206
posener/mock-import
mock_import.py
mock_import
def mock_import(do_not_mock=None, **mock_kwargs): """ Mocks import statements by ignoring ImportErrors and replacing the missing module with a Mock. :param str|unicode|list[str|unicode] do_not_mock: names of modules that should exists, and an ImportError could be raised for. :param mock_kwargs: kwargs for MagicMock object. :return: patch object """ do_not_mock = _to_list(do_not_mock) def try_import(module_name, *args, **kwargs): try: return _builtins_import(module_name, *args, **kwargs) except: # intentionally catch all exceptions if any((_match(module_name, prefix) for prefix in do_not_mock)): # This is a module we need to import, # so we raise the exception instead of mocking it raise # Mock external module so we can peacefully create our client return mock.MagicMock(**mock_kwargs) return mock.patch('six.moves.builtins.__import__', try_import)
python
def mock_import(do_not_mock=None, **mock_kwargs): """ Mocks import statements by ignoring ImportErrors and replacing the missing module with a Mock. :param str|unicode|list[str|unicode] do_not_mock: names of modules that should exists, and an ImportError could be raised for. :param mock_kwargs: kwargs for MagicMock object. :return: patch object """ do_not_mock = _to_list(do_not_mock) def try_import(module_name, *args, **kwargs): try: return _builtins_import(module_name, *args, **kwargs) except: # intentionally catch all exceptions if any((_match(module_name, prefix) for prefix in do_not_mock)): # This is a module we need to import, # so we raise the exception instead of mocking it raise # Mock external module so we can peacefully create our client return mock.MagicMock(**mock_kwargs) return mock.patch('six.moves.builtins.__import__', try_import)
[ "def", "mock_import", "(", "do_not_mock", "=", "None", ",", "*", "*", "mock_kwargs", ")", ":", "do_not_mock", "=", "_to_list", "(", "do_not_mock", ")", "def", "try_import", "(", "module_name", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ...
Mocks import statements by ignoring ImportErrors and replacing the missing module with a Mock. :param str|unicode|list[str|unicode] do_not_mock: names of modules that should exists, and an ImportError could be raised for. :param mock_kwargs: kwargs for MagicMock object. :return: patch object
[ "Mocks", "import", "statements", "by", "ignoring", "ImportErrors", "and", "replacing", "the", "missing", "module", "with", "a", "Mock", "." ]
train
https://github.com/posener/mock-import/blob/d2dcbc2ce2b5e2f072cc8e0d0615e8606777ee54/mock_import.py#L34-L58
jwodder/doapi
doapi/doapi.py
doapi.request
def request(self, url, params=None, data=None, method='GET'): """ Perform an HTTP request and return the response body as a decoded JSON value :param str url: the URL to make the request of. If ``url`` begins with a forward slash, :attr:`endpoint` is prepended to it; otherwise, ``url`` is treated as an absolute URL. :param dict params: parameters to add to the URL's query string :param data: a value to send in the body of the request. If ``data`` is not a string, it will be serialized as JSON before sending; either way, the :mailheader:`Content-Type` header of the request will be set to :mimetype:`application/json`. Note that a ``data`` value of `None` means "Don't send any data"; to send an actual `None` value, convert it to JSON (i.e., the string ``"null"``) first. :param str method: the HTTP method to use: ``"GET"``, ``"POST"``, ``"PUT"``, or ``"DELETE"`` (case-insensitive); default: ``"GET"`` :return: a decoded JSON value, or `None` if no data was returned :rtype: `list` or `dict` (depending on the request) or `None` :raises ValueError: if ``method`` is an invalid value :raises DOAPIError: if the API endpoint replies with an error """ if url.startswith('/'): url = self.endpoint + url attrs = { "headers": {"Authorization": "Bearer " + self.api_token}, "params": params if params is not None else {}, "timeout": self.timeout, } method = method.upper() if data is not None: if not isinstance(data, string_types): data = json.dumps(data, cls=DOEncoder) attrs["data"] = data attrs["headers"]["Content-Type"] = "application/json" if method == 'GET': r = self.session.get(url, **attrs) elif method == 'POST': r = self.session.post(url, **attrs) elif method == 'PUT': r = self.session.put(url, **attrs) elif method == 'DELETE': r = self.session.delete(url, **attrs) else: raise ValueError('Unrecognized HTTP method: ' + repr(method)) self.last_response = r self.last_meta = None if not r.ok: raise DOAPIError(r) if r.text.strip(): # Even when returning "no content", the API can 
still return # whitespace. response = r.json() try: self.last_meta = response["meta"] except (KeyError, TypeError): pass return response
python
def request(self, url, params=None, data=None, method='GET'): """ Perform an HTTP request and return the response body as a decoded JSON value :param str url: the URL to make the request of. If ``url`` begins with a forward slash, :attr:`endpoint` is prepended to it; otherwise, ``url`` is treated as an absolute URL. :param dict params: parameters to add to the URL's query string :param data: a value to send in the body of the request. If ``data`` is not a string, it will be serialized as JSON before sending; either way, the :mailheader:`Content-Type` header of the request will be set to :mimetype:`application/json`. Note that a ``data`` value of `None` means "Don't send any data"; to send an actual `None` value, convert it to JSON (i.e., the string ``"null"``) first. :param str method: the HTTP method to use: ``"GET"``, ``"POST"``, ``"PUT"``, or ``"DELETE"`` (case-insensitive); default: ``"GET"`` :return: a decoded JSON value, or `None` if no data was returned :rtype: `list` or `dict` (depending on the request) or `None` :raises ValueError: if ``method`` is an invalid value :raises DOAPIError: if the API endpoint replies with an error """ if url.startswith('/'): url = self.endpoint + url attrs = { "headers": {"Authorization": "Bearer " + self.api_token}, "params": params if params is not None else {}, "timeout": self.timeout, } method = method.upper() if data is not None: if not isinstance(data, string_types): data = json.dumps(data, cls=DOEncoder) attrs["data"] = data attrs["headers"]["Content-Type"] = "application/json" if method == 'GET': r = self.session.get(url, **attrs) elif method == 'POST': r = self.session.post(url, **attrs) elif method == 'PUT': r = self.session.put(url, **attrs) elif method == 'DELETE': r = self.session.delete(url, **attrs) else: raise ValueError('Unrecognized HTTP method: ' + repr(method)) self.last_response = r self.last_meta = None if not r.ok: raise DOAPIError(r) if r.text.strip(): # Even when returning "no content", the API can 
still return # whitespace. response = r.json() try: self.last_meta = response["meta"] except (KeyError, TypeError): pass return response
[ "def", "request", "(", "self", ",", "url", ",", "params", "=", "None", ",", "data", "=", "None", ",", "method", "=", "'GET'", ")", ":", "if", "url", ".", "startswith", "(", "'/'", ")", ":", "url", "=", "self", ".", "endpoint", "+", "url", "attrs"...
Perform an HTTP request and return the response body as a decoded JSON value :param str url: the URL to make the request of. If ``url`` begins with a forward slash, :attr:`endpoint` is prepended to it; otherwise, ``url`` is treated as an absolute URL. :param dict params: parameters to add to the URL's query string :param data: a value to send in the body of the request. If ``data`` is not a string, it will be serialized as JSON before sending; either way, the :mailheader:`Content-Type` header of the request will be set to :mimetype:`application/json`. Note that a ``data`` value of `None` means "Don't send any data"; to send an actual `None` value, convert it to JSON (i.e., the string ``"null"``) first. :param str method: the HTTP method to use: ``"GET"``, ``"POST"``, ``"PUT"``, or ``"DELETE"`` (case-insensitive); default: ``"GET"`` :return: a decoded JSON value, or `None` if no data was returned :rtype: `list` or `dict` (depending on the request) or `None` :raises ValueError: if ``method`` is an invalid value :raises DOAPIError: if the API endpoint replies with an error
[ "Perform", "an", "HTTP", "request", "and", "return", "the", "response", "body", "as", "a", "decoded", "JSON", "value" ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L81-L139
jwodder/doapi
doapi/doapi.py
doapi.last_rate_limit
def last_rate_limit(self): """ A `dict` of the rate limit information returned in the most recent response, or `None` if no requests have been made yet. The `dict` consists of all headers whose names begin with ``"RateLimit"`` (case insensitive). The DigitalOcean API specifies the following rate limit headers: :var string RateLimit-Limit: the number of requests that can be made per hour :var string RateLimit-Remaining: the number of requests remaining until the limit is reached :var string RateLimit-Reset: the Unix timestamp for the time when the oldest request will expire from rate limit consideration """ if self.last_response is None: return None else: return {k:v for k,v in iteritems(self.last_response.headers) if k.lower().startswith('ratelimit')}
python
def last_rate_limit(self): """ A `dict` of the rate limit information returned in the most recent response, or `None` if no requests have been made yet. The `dict` consists of all headers whose names begin with ``"RateLimit"`` (case insensitive). The DigitalOcean API specifies the following rate limit headers: :var string RateLimit-Limit: the number of requests that can be made per hour :var string RateLimit-Remaining: the number of requests remaining until the limit is reached :var string RateLimit-Reset: the Unix timestamp for the time when the oldest request will expire from rate limit consideration """ if self.last_response is None: return None else: return {k:v for k,v in iteritems(self.last_response.headers) if k.lower().startswith('ratelimit')}
[ "def", "last_rate_limit", "(", "self", ")", ":", "if", "self", ".", "last_response", "is", "None", ":", "return", "None", "else", ":", "return", "{", "k", ":", "v", "for", "k", ",", "v", "in", "iteritems", "(", "self", ".", "last_response", ".", "hea...
A `dict` of the rate limit information returned in the most recent response, or `None` if no requests have been made yet. The `dict` consists of all headers whose names begin with ``"RateLimit"`` (case insensitive). The DigitalOcean API specifies the following rate limit headers: :var string RateLimit-Limit: the number of requests that can be made per hour :var string RateLimit-Remaining: the number of requests remaining until the limit is reached :var string RateLimit-Reset: the Unix timestamp for the time when the oldest request will expire from rate limit consideration
[ "A", "dict", "of", "the", "rate", "limit", "information", "returned", "in", "the", "most", "recent", "response", "or", "None", "if", "no", "requests", "have", "been", "made", "yet", ".", "The", "dict", "consists", "of", "all", "headers", "whose", "names", ...
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L142-L162
jwodder/doapi
doapi/doapi.py
doapi.paginate
def paginate(self, url, key, params=None): """ Fetch a sequence of paginated resources from the API endpoint. The initial request to ``url`` and all subsequent requests must respond with a JSON object; the field specified by ``key`` must be a list, whose elements will be yielded, and the next request will be made to the URL in the ``.links.pages.next`` field until the responses no longer contain that field. :param str url: the URL to make the initial request of. If ``url`` begins with a forward slash, :attr:`endpoint` is prepended to it; otherwise, ``url`` is treated as an absolute URL. :param str key: the field on each page containing a list of values to yield :param dict params: parameters to add to the initial URL's query string. A ``"per_page"`` parameter may be included to override the default :attr:`per_page` setting. :rtype: generator of decoded JSON values :raises ValueError: if a response body is not an object or ``key`` is not one of its keys :raises DOAPIError: if the API endpoint replies with an error """ if params is None: params = {} if self.per_page is not None and "per_page" not in params: params = dict(params, per_page=self.per_page) page = self.request(url, params=params) while True: try: objects = page[key] except (KeyError, TypeError): raise ValueError('{0!r}: not a key of the response body'\ .format(key)) for obj in objects: yield obj try: url = page["links"]["pages"]["next"] except KeyError: break page = self.request(url)
python
def paginate(self, url, key, params=None): """ Fetch a sequence of paginated resources from the API endpoint. The initial request to ``url`` and all subsequent requests must respond with a JSON object; the field specified by ``key`` must be a list, whose elements will be yielded, and the next request will be made to the URL in the ``.links.pages.next`` field until the responses no longer contain that field. :param str url: the URL to make the initial request of. If ``url`` begins with a forward slash, :attr:`endpoint` is prepended to it; otherwise, ``url`` is treated as an absolute URL. :param str key: the field on each page containing a list of values to yield :param dict params: parameters to add to the initial URL's query string. A ``"per_page"`` parameter may be included to override the default :attr:`per_page` setting. :rtype: generator of decoded JSON values :raises ValueError: if a response body is not an object or ``key`` is not one of its keys :raises DOAPIError: if the API endpoint replies with an error """ if params is None: params = {} if self.per_page is not None and "per_page" not in params: params = dict(params, per_page=self.per_page) page = self.request(url, params=params) while True: try: objects = page[key] except (KeyError, TypeError): raise ValueError('{0!r}: not a key of the response body'\ .format(key)) for obj in objects: yield obj try: url = page["links"]["pages"]["next"] except KeyError: break page = self.request(url)
[ "def", "paginate", "(", "self", ",", "url", ",", "key", ",", "params", "=", "None", ")", ":", "if", "params", "is", "None", ":", "params", "=", "{", "}", "if", "self", ".", "per_page", "is", "not", "None", "and", "\"per_page\"", "not", "in", "param...
Fetch a sequence of paginated resources from the API endpoint. The initial request to ``url`` and all subsequent requests must respond with a JSON object; the field specified by ``key`` must be a list, whose elements will be yielded, and the next request will be made to the URL in the ``.links.pages.next`` field until the responses no longer contain that field. :param str url: the URL to make the initial request of. If ``url`` begins with a forward slash, :attr:`endpoint` is prepended to it; otherwise, ``url`` is treated as an absolute URL. :param str key: the field on each page containing a list of values to yield :param dict params: parameters to add to the initial URL's query string. A ``"per_page"`` parameter may be included to override the default :attr:`per_page` setting. :rtype: generator of decoded JSON values :raises ValueError: if a response body is not an object or ``key`` is not one of its keys :raises DOAPIError: if the API endpoint replies with an error
[ "Fetch", "a", "sequence", "of", "paginated", "resources", "from", "the", "API", "endpoint", ".", "The", "initial", "request", "to", "url", "and", "all", "subsequent", "requests", "must", "respond", "with", "a", "JSON", "object", ";", "the", "field", "specifi...
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L164-L203
jwodder/doapi
doapi/doapi.py
doapi.fetch_all_droplets
def fetch_all_droplets(self, tag_name=None): r""" Returns a generator that yields all of the droplets belonging to the account .. versionchanged:: 0.2.0 ``tag_name`` parameter added :param tag_name: if non-`None`, only droplets with the given tag are returned :type tag_name: string or `Tag` :rtype: generator of `Droplet`\ s :raises DOAPIError: if the API endpoint replies with an error """ params = {} if tag_name is not None: params["tag_name"] = str(tag_name) return map(self._droplet, self.paginate('/v2/droplets', 'droplets', params=params))
python
def fetch_all_droplets(self, tag_name=None): r""" Returns a generator that yields all of the droplets belonging to the account .. versionchanged:: 0.2.0 ``tag_name`` parameter added :param tag_name: if non-`None`, only droplets with the given tag are returned :type tag_name: string or `Tag` :rtype: generator of `Droplet`\ s :raises DOAPIError: if the API endpoint replies with an error """ params = {} if tag_name is not None: params["tag_name"] = str(tag_name) return map(self._droplet, self.paginate('/v2/droplets', 'droplets', params=params))
[ "def", "fetch_all_droplets", "(", "self", ",", "tag_name", "=", "None", ")", ":", "params", "=", "{", "}", "if", "tag_name", "is", "not", "None", ":", "params", "[", "\"tag_name\"", "]", "=", "str", "(", "tag_name", ")", "return", "map", "(", "self", ...
r""" Returns a generator that yields all of the droplets belonging to the account .. versionchanged:: 0.2.0 ``tag_name`` parameter added :param tag_name: if non-`None`, only droplets with the given tag are returned :type tag_name: string or `Tag` :rtype: generator of `Droplet`\ s :raises DOAPIError: if the API endpoint replies with an error
[ "r", "Returns", "a", "generator", "that", "yields", "all", "of", "the", "droplets", "belonging", "to", "the", "account" ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L230-L248
jwodder/doapi
doapi/doapi.py
doapi.create_droplet
def create_droplet(self, name, image, size, region, ssh_keys=None, backups=None, ipv6=None, private_networking=None, user_data=None, **kwargs): """ Create a new droplet. All fields other than ``name``, ``image``, ``size``, and ``region`` are optional and will be omitted from the API request if not specified. The returned `Droplet` object will represent the droplet at the moment of creation; the actual droplet may not be active yet and may not have even been assigned an IP address. To wait for the droplet to activate, use the `Droplet`'s :meth:`~Droplet.wait` method. :param str name: a name for the droplet :param image: the image ID, slug, or `Image` object representing the base image to use for the droplet :type image: integer, string, or `Image` :param size: the slug or `Size` object representing the size of the new droplet :type size: string or `Size` :param region: the slug or `Region` object representing the region in which to create the droplet :type region: string or `Region` :param iterable ssh_keys: an iterable of SSH key resource IDs, SSH key fingerprints, and/or `SSHKey` objects specifying the public keys to add to the new droplet's :file:`/root/.ssh/authorized_keys` file :param bool backups: whether to enable automatic backups on the new droplet :param bool ipv6: whether to enable IPv6 on the new droplet :param bool private_networking: whether to enable private networking for the new droplet :param str user_data: a string of user data/metadata for the droplet :param kwargs: additional fields to include in the API request :return: the new droplet resource :rtype: Droplet :raises DOAPIError: if the API endpoint replies with an error """ data = { "name": name, "image": image.id if isinstance(image, Image) else image, "size": str(size), "region": str(region), } if ssh_keys is not None: data["ssh_keys"] = [k._id if isinstance(k, SSHKey) else k for k in ssh_keys] if backups is not None: data["backups"] = backups if ipv6 is not None: data["ipv6"] = ipv6 if 
private_networking is not None: data["private_networking"] = private_networking if user_data is not None: data["user_data"] = user_data data.update(kwargs) return self._droplet(self.request('/v2/droplets', method='POST', data=data)["droplet"])
python
def create_droplet(self, name, image, size, region, ssh_keys=None, backups=None, ipv6=None, private_networking=None, user_data=None, **kwargs): """ Create a new droplet. All fields other than ``name``, ``image``, ``size``, and ``region`` are optional and will be omitted from the API request if not specified. The returned `Droplet` object will represent the droplet at the moment of creation; the actual droplet may not be active yet and may not have even been assigned an IP address. To wait for the droplet to activate, use the `Droplet`'s :meth:`~Droplet.wait` method. :param str name: a name for the droplet :param image: the image ID, slug, or `Image` object representing the base image to use for the droplet :type image: integer, string, or `Image` :param size: the slug or `Size` object representing the size of the new droplet :type size: string or `Size` :param region: the slug or `Region` object representing the region in which to create the droplet :type region: string or `Region` :param iterable ssh_keys: an iterable of SSH key resource IDs, SSH key fingerprints, and/or `SSHKey` objects specifying the public keys to add to the new droplet's :file:`/root/.ssh/authorized_keys` file :param bool backups: whether to enable automatic backups on the new droplet :param bool ipv6: whether to enable IPv6 on the new droplet :param bool private_networking: whether to enable private networking for the new droplet :param str user_data: a string of user data/metadata for the droplet :param kwargs: additional fields to include in the API request :return: the new droplet resource :rtype: Droplet :raises DOAPIError: if the API endpoint replies with an error """ data = { "name": name, "image": image.id if isinstance(image, Image) else image, "size": str(size), "region": str(region), } if ssh_keys is not None: data["ssh_keys"] = [k._id if isinstance(k, SSHKey) else k for k in ssh_keys] if backups is not None: data["backups"] = backups if ipv6 is not None: data["ipv6"] = ipv6 if 
private_networking is not None: data["private_networking"] = private_networking if user_data is not None: data["user_data"] = user_data data.update(kwargs) return self._droplet(self.request('/v2/droplets', method='POST', data=data)["droplet"])
[ "def", "create_droplet", "(", "self", ",", "name", ",", "image", ",", "size", ",", "region", ",", "ssh_keys", "=", "None", ",", "backups", "=", "None", ",", "ipv6", "=", "None", ",", "private_networking", "=", "None", ",", "user_data", "=", "None", ","...
Create a new droplet. All fields other than ``name``, ``image``, ``size``, and ``region`` are optional and will be omitted from the API request if not specified. The returned `Droplet` object will represent the droplet at the moment of creation; the actual droplet may not be active yet and may not have even been assigned an IP address. To wait for the droplet to activate, use the `Droplet`'s :meth:`~Droplet.wait` method. :param str name: a name for the droplet :param image: the image ID, slug, or `Image` object representing the base image to use for the droplet :type image: integer, string, or `Image` :param size: the slug or `Size` object representing the size of the new droplet :type size: string or `Size` :param region: the slug or `Region` object representing the region in which to create the droplet :type region: string or `Region` :param iterable ssh_keys: an iterable of SSH key resource IDs, SSH key fingerprints, and/or `SSHKey` objects specifying the public keys to add to the new droplet's :file:`/root/.ssh/authorized_keys` file :param bool backups: whether to enable automatic backups on the new droplet :param bool ipv6: whether to enable IPv6 on the new droplet :param bool private_networking: whether to enable private networking for the new droplet :param str user_data: a string of user data/metadata for the droplet :param kwargs: additional fields to include in the API request :return: the new droplet resource :rtype: Droplet :raises DOAPIError: if the API endpoint replies with an error
[ "Create", "a", "new", "droplet", ".", "All", "fields", "other", "than", "name", "image", "size", "and", "region", "are", "optional", "and", "will", "be", "omitted", "from", "the", "API", "request", "if", "not", "specified", "." ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L250-L306
jwodder/doapi
doapi/doapi.py
doapi.create_multiple_droplets
def create_multiple_droplets(self, names, image, size, region, ssh_keys=None, backups=None, ipv6=None, private_networking=None, user_data=None, **kwargs): r""" Create multiple new droplets at once with the same image, size, etc., differing only in name. All fields other than ``names``, ``image``, ``size``, and ``region`` are optional and will be omitted from the API request if not specified. The returned `Droplet` objects will represent the droplets at the moment of creation; the actual droplets may not be active yet and may not have even been assigned IP addresses. To wait for the droplets to activate, use their :meth:`~Droplet.wait` method or `wait_droplets`. :param names: the names for the new droplets :type names: list of strings :param image: the image ID, slug, or `Image` object representing the base image to use for the droplets :type image: integer, string, or `Image` :param size: the slug or `Size` object representing the size of the new droplets :type size: string or `Size` :param region: the slug or `Region` object representing the region in which to create the droplets :type region: string or `Region` :param iterable ssh_keys: an iterable of SSH key resource IDs, SSH key fingerprints, and/or `SSHKey` objects specifying the public keys to add to the new droplets' :file:`/root/.ssh/authorized_keys` files :param bool backups: whether to enable automatic backups on the new droplets :param bool ipv6: whether to enable IPv6 on the new droplets :param bool private_networking: whether to enable private networking for the new droplets :param str user_data: a string of user data/metadata for the droplets :param kwargs: additional fields to include in the API request :return: the new droplet resources :rtype: list of `Droplet`\ s :raises DOAPIError: if the API endpoint replies with an error """ data = { "names": names, "image": image.id if isinstance(image, Image) else image, "size": str(size), "region": str(region), } if ssh_keys is not None: data["ssh_keys"] = 
[k._id if isinstance(k, SSHKey) else k for k in ssh_keys] if backups is not None: data["backups"] = backups if ipv6 is not None: data["ipv6"] = ipv6 if private_networking is not None: data["private_networking"] = private_networking if user_data is not None: data["user_data"] = user_data data.update(kwargs) return list(map(self._droplet, self.request('/v2/droplets', method='POST', data=data)["droplets"]))
python
def create_multiple_droplets(self, names, image, size, region, ssh_keys=None, backups=None, ipv6=None, private_networking=None, user_data=None, **kwargs): r""" Create multiple new droplets at once with the same image, size, etc., differing only in name. All fields other than ``names``, ``image``, ``size``, and ``region`` are optional and will be omitted from the API request if not specified. The returned `Droplet` objects will represent the droplets at the moment of creation; the actual droplets may not be active yet and may not have even been assigned IP addresses. To wait for the droplets to activate, use their :meth:`~Droplet.wait` method or `wait_droplets`. :param names: the names for the new droplets :type names: list of strings :param image: the image ID, slug, or `Image` object representing the base image to use for the droplets :type image: integer, string, or `Image` :param size: the slug or `Size` object representing the size of the new droplets :type size: string or `Size` :param region: the slug or `Region` object representing the region in which to create the droplets :type region: string or `Region` :param iterable ssh_keys: an iterable of SSH key resource IDs, SSH key fingerprints, and/or `SSHKey` objects specifying the public keys to add to the new droplets' :file:`/root/.ssh/authorized_keys` files :param bool backups: whether to enable automatic backups on the new droplets :param bool ipv6: whether to enable IPv6 on the new droplets :param bool private_networking: whether to enable private networking for the new droplets :param str user_data: a string of user data/metadata for the droplets :param kwargs: additional fields to include in the API request :return: the new droplet resources :rtype: list of `Droplet`\ s :raises DOAPIError: if the API endpoint replies with an error """ data = { "names": names, "image": image.id if isinstance(image, Image) else image, "size": str(size), "region": str(region), } if ssh_keys is not None: data["ssh_keys"] = 
[k._id if isinstance(k, SSHKey) else k for k in ssh_keys] if backups is not None: data["backups"] = backups if ipv6 is not None: data["ipv6"] = ipv6 if private_networking is not None: data["private_networking"] = private_networking if user_data is not None: data["user_data"] = user_data data.update(kwargs) return list(map(self._droplet, self.request('/v2/droplets', method='POST', data=data)["droplets"]))
[ "def", "create_multiple_droplets", "(", "self", ",", "names", ",", "image", ",", "size", ",", "region", ",", "ssh_keys", "=", "None", ",", "backups", "=", "None", ",", "ipv6", "=", "None", ",", "private_networking", "=", "None", ",", "user_data", "=", "N...
r""" Create multiple new droplets at once with the same image, size, etc., differing only in name. All fields other than ``names``, ``image``, ``size``, and ``region`` are optional and will be omitted from the API request if not specified. The returned `Droplet` objects will represent the droplets at the moment of creation; the actual droplets may not be active yet and may not have even been assigned IP addresses. To wait for the droplets to activate, use their :meth:`~Droplet.wait` method or `wait_droplets`. :param names: the names for the new droplets :type names: list of strings :param image: the image ID, slug, or `Image` object representing the base image to use for the droplets :type image: integer, string, or `Image` :param size: the slug or `Size` object representing the size of the new droplets :type size: string or `Size` :param region: the slug or `Region` object representing the region in which to create the droplets :type region: string or `Region` :param iterable ssh_keys: an iterable of SSH key resource IDs, SSH key fingerprints, and/or `SSHKey` objects specifying the public keys to add to the new droplets' :file:`/root/.ssh/authorized_keys` files :param bool backups: whether to enable automatic backups on the new droplets :param bool ipv6: whether to enable IPv6 on the new droplets :param bool private_networking: whether to enable private networking for the new droplets :param str user_data: a string of user data/metadata for the droplets :param kwargs: additional fields to include in the API request :return: the new droplet resources :rtype: list of `Droplet`\ s :raises DOAPIError: if the API endpoint replies with an error
[ "r", "Create", "multiple", "new", "droplets", "at", "once", "with", "the", "same", "image", "size", "etc", ".", "differing", "only", "in", "name", ".", "All", "fields", "other", "than", "names", "image", "size", "and", "region", "are", "optional", "and", ...
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L308-L368
jwodder/doapi
doapi/doapi.py
doapi.fetch_all_droplet_neighbors
def fetch_all_droplet_neighbors(self): r""" Returns a generator of all sets of multiple droplets that are running on the same physical hardware :rtype: generator of lists of `Droplet`\ s :raises DOAPIError: if the API endpoint replies with an error """ for hood in self.paginate('/v2/reports/droplet_neighbors', 'neighbors'): yield list(map(self._droplet, hood))
python
def fetch_all_droplet_neighbors(self): r""" Returns a generator of all sets of multiple droplets that are running on the same physical hardware :rtype: generator of lists of `Droplet`\ s :raises DOAPIError: if the API endpoint replies with an error """ for hood in self.paginate('/v2/reports/droplet_neighbors', 'neighbors'): yield list(map(self._droplet, hood))
[ "def", "fetch_all_droplet_neighbors", "(", "self", ")", ":", "for", "hood", "in", "self", ".", "paginate", "(", "'/v2/reports/droplet_neighbors'", ",", "'neighbors'", ")", ":", "yield", "list", "(", "map", "(", "self", ".", "_droplet", ",", "hood", ")", ")" ...
r""" Returns a generator of all sets of multiple droplets that are running on the same physical hardware :rtype: generator of lists of `Droplet`\ s :raises DOAPIError: if the API endpoint replies with an error
[ "r", "Returns", "a", "generator", "of", "all", "sets", "of", "multiple", "droplets", "that", "are", "running", "on", "the", "same", "physical", "hardware" ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L370-L379
jwodder/doapi
doapi/doapi.py
doapi.wait_droplets
def wait_droplets(self, droplets, status=None, locked=None, wait_interval=None, wait_time=None): r""" Poll the server periodically until all droplets in ``droplets`` have reached some final state, yielding each `Droplet`'s final value when it's done. If ``status`` is non-`None`, ``wait_droplets`` will wait for each droplet's ``status`` field to equal the given value. If ``locked`` is non-`None`, ``wait_droplets`` will wait for each droplet's ``locked`` field to equal (the truth value of) the given value. Exactly one of ``status`` and ``locked`` must be non-`None`. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress droplets) is raised. If a `KeyboardInterrupt` is caught, any remaining droplets are returned immediately without waiting for completion. .. versionchanged:: 0.2.0 Raises `WaitTimeoutError` on timeout .. versionchanged:: 0.2.0 ``locked`` parameter added .. versionchanged:: 0.2.0 No longer waits for actions to complete :param iterable droplets: an iterable of `Droplet`\ s and/or other values that are acceptable arguments to :meth:`fetch_droplet` :param status: When non-`None`, the desired value for the ``status`` field of each `Droplet`, which should be one of `Droplet.STATUS_ACTIVE`, `Droplet.STATUS_ARCHIVE`, `Droplet.STATUS_NEW`, and `Droplet.STATUS_OFF`. (For the sake of forwards-compatibility, any other value is accepted as well.) 
:type status: string or `None` :param locked: When non-`None`, the desired value for the ``locked`` field of each `Droplet` :type locked: `bool` or `None` :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any droplets have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator of `Droplet`\ s :raises TypeError: if both or neither of ``status`` & ``locked`` are defined :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded """ if (status is None) == (locked is None): ### TODO: Is TypeError the right type of error? raise TypeError('Exactly one of "status" and "locked" must be' ' specified') droplets = map(self._droplet, droplets) if status is not None: return self._wait(droplets, "status", status, wait_interval, wait_time) if locked is not None: return self._wait(droplets, "locked", bool(locked), wait_interval, wait_time)
python
def wait_droplets(self, droplets, status=None, locked=None, wait_interval=None, wait_time=None): r""" Poll the server periodically until all droplets in ``droplets`` have reached some final state, yielding each `Droplet`'s final value when it's done. If ``status`` is non-`None`, ``wait_droplets`` will wait for each droplet's ``status`` field to equal the given value. If ``locked`` is non-`None`, ``wait_droplets`` will wait for each droplet's ``locked`` field to equal (the truth value of) the given value. Exactly one of ``status`` and ``locked`` must be non-`None`. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress droplets) is raised. If a `KeyboardInterrupt` is caught, any remaining droplets are returned immediately without waiting for completion. .. versionchanged:: 0.2.0 Raises `WaitTimeoutError` on timeout .. versionchanged:: 0.2.0 ``locked`` parameter added .. versionchanged:: 0.2.0 No longer waits for actions to complete :param iterable droplets: an iterable of `Droplet`\ s and/or other values that are acceptable arguments to :meth:`fetch_droplet` :param status: When non-`None`, the desired value for the ``status`` field of each `Droplet`, which should be one of `Droplet.STATUS_ACTIVE`, `Droplet.STATUS_ARCHIVE`, `Droplet.STATUS_NEW`, and `Droplet.STATUS_OFF`. (For the sake of forwards-compatibility, any other value is accepted as well.) 
:type status: string or `None` :param locked: When non-`None`, the desired value for the ``locked`` field of each `Droplet` :type locked: `bool` or `None` :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any droplets have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator of `Droplet`\ s :raises TypeError: if both or neither of ``status`` & ``locked`` are defined :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded """ if (status is None) == (locked is None): ### TODO: Is TypeError the right type of error? raise TypeError('Exactly one of "status" and "locked" must be' ' specified') droplets = map(self._droplet, droplets) if status is not None: return self._wait(droplets, "status", status, wait_interval, wait_time) if locked is not None: return self._wait(droplets, "locked", bool(locked), wait_interval, wait_time)
[ "def", "wait_droplets", "(", "self", ",", "droplets", ",", "status", "=", "None", ",", "locked", "=", "None", ",", "wait_interval", "=", "None", ",", "wait_time", "=", "None", ")", ":", "if", "(", "status", "is", "None", ")", "==", "(", "locked", "is...
r""" Poll the server periodically until all droplets in ``droplets`` have reached some final state, yielding each `Droplet`'s final value when it's done. If ``status`` is non-`None`, ``wait_droplets`` will wait for each droplet's ``status`` field to equal the given value. If ``locked`` is non-`None`, ``wait_droplets`` will wait for each droplet's ``locked`` field to equal (the truth value of) the given value. Exactly one of ``status`` and ``locked`` must be non-`None`. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress droplets) is raised. If a `KeyboardInterrupt` is caught, any remaining droplets are returned immediately without waiting for completion. .. versionchanged:: 0.2.0 Raises `WaitTimeoutError` on timeout .. versionchanged:: 0.2.0 ``locked`` parameter added .. versionchanged:: 0.2.0 No longer waits for actions to complete :param iterable droplets: an iterable of `Droplet`\ s and/or other values that are acceptable arguments to :meth:`fetch_droplet` :param status: When non-`None`, the desired value for the ``status`` field of each `Droplet`, which should be one of `Droplet.STATUS_ACTIVE`, `Droplet.STATUS_ARCHIVE`, `Droplet.STATUS_NEW`, and `Droplet.STATUS_OFF`. (For the sake of forwards-compatibility, any other value is accepted as well.) 
:type status: string or `None` :param locked: When non-`None`, the desired value for the ``locked`` field of each `Droplet` :type locked: `bool` or `None` :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any droplets have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator of `Droplet`\ s :raises TypeError: if both or neither of ``status`` & ``locked`` are defined :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded
[ "r", "Poll", "the", "server", "periodically", "until", "all", "droplets", "in", "droplets", "have", "reached", "some", "final", "state", "yielding", "each", "Droplet", "s", "final", "value", "when", "it", "s", "done", ".", "If", "status", "is", "non", "-",...
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L381-L441
jwodder/doapi
doapi/doapi.py
doapi.wait_actions
def wait_actions(self, actions, wait_interval=None, wait_time=None): r""" Poll the server periodically until all actions in ``actions`` have either completed or errored out, yielding each `Action`'s final value as it ends. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress actions) is raised. If a `KeyboardInterrupt` is caught, any remaining actions are returned immediately without waiting for completion. .. versionchanged:: 0.2.0 Raises `WaitTimeoutError` on timeout :param iterable actions: an iterable of `Action`\ s and/or other values that are acceptable arguments to :meth:`fetch_action` :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any actions have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator of `Action`\ s :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded """ return self._wait(map(self._action, actions), "done", True, wait_interval, wait_time)
python
def wait_actions(self, actions, wait_interval=None, wait_time=None): r""" Poll the server periodically until all actions in ``actions`` have either completed or errored out, yielding each `Action`'s final value as it ends. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress actions) is raised. If a `KeyboardInterrupt` is caught, any remaining actions are returned immediately without waiting for completion. .. versionchanged:: 0.2.0 Raises `WaitTimeoutError` on timeout :param iterable actions: an iterable of `Action`\ s and/or other values that are acceptable arguments to :meth:`fetch_action` :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any actions have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator of `Action`\ s :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded """ return self._wait(map(self._action, actions), "done", True, wait_interval, wait_time)
[ "def", "wait_actions", "(", "self", ",", "actions", ",", "wait_interval", "=", "None", ",", "wait_time", "=", "None", ")", ":", "return", "self", ".", "_wait", "(", "map", "(", "self", ".", "_action", ",", "actions", ")", ",", "\"done\"", ",", "True", ...
r""" Poll the server periodically until all actions in ``actions`` have either completed or errored out, yielding each `Action`'s final value as it ends. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress actions) is raised. If a `KeyboardInterrupt` is caught, any remaining actions are returned immediately without waiting for completion. .. versionchanged:: 0.2.0 Raises `WaitTimeoutError` on timeout :param iterable actions: an iterable of `Action`\ s and/or other values that are acceptable arguments to :meth:`fetch_action` :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any actions have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator of `Action`\ s :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded
[ "r", "Poll", "the", "server", "periodically", "until", "all", "actions", "in", "actions", "have", "either", "completed", "or", "errored", "out", "yielding", "each", "Action", "s", "final", "value", "as", "it", "ends", "." ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L493-L522
jwodder/doapi
doapi/doapi.py
doapi.wait_actions_on_objects
def wait_actions_on_objects(self, objects, wait_interval=None, wait_time=None):
    """
    .. versionadded:: 0.2.0

    Poll the server until the most recent action on every resource in
    ``objects`` has finished, yielding each resource's final state as its
    action completes.  A `WaitTimeoutError` (carrying any actions still in
    progress) is raised once ``wait_time`` is exceeded; a
    `KeyboardInterrupt` causes the remaining actions to be returned
    immediately without waiting.

    :param iterable objects: an iterable of resource objects that have
        ``fetch_last_action`` methods
    :param number wait_interval: seconds to sleep between polls; defaults to
        :attr:`wait_interval` when unspecified or `None`
    :param number wait_time: total seconds before giving up (negative means
        wait forever); defaults to :attr:`wait_time` when unspecified or
        `None`
    :rtype: generator of objects
    :raises DOAPIError: if the API endpoint replies with an error
    :raises WaitTimeoutError: if ``wait_time`` is exceeded
    """
    in_progress = []
    for resource in objects:
        last = resource.fetch_last_action()
        if last is None:
            # Nothing pending: the resource is already in its final state.
            yield resource
        else:
            in_progress.append(last)
    for finished in self.wait_actions(in_progress, wait_interval, wait_time):
        yield finished.fetch_resource()
python
def wait_actions_on_objects(self, objects, wait_interval=None, wait_time=None): """ .. versionadded:: 0.2.0 Poll the server periodically until the most recent action on each resource in ``objects`` has finished, yielding each resource's final state when the corresponding action is done. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress actions) is raised. If a `KeyboardInterrupt` is caught, any remaining actions are returned immediately without waiting for completion. :param iterable objects: an iterable of resource objects that have ``fetch_last_action`` methods :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any actions have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator of objects :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded """ acts = [] for o in objects: a = o.fetch_last_action() if a is None: yield o else: acts.append(a) for a in self.wait_actions(acts, wait_interval, wait_time): yield a.fetch_resource()
[ "def", "wait_actions_on_objects", "(", "self", ",", "objects", ",", "wait_interval", "=", "None", ",", "wait_time", "=", "None", ")", ":", "acts", "=", "[", "]", "for", "o", "in", "objects", ":", "a", "=", "o", ".", "fetch_last_action", "(", ")", "if",...
.. versionadded:: 0.2.0 Poll the server periodically until the most recent action on each resource in ``objects`` has finished, yielding each resource's final state when the corresponding action is done. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress actions) is raised. If a `KeyboardInterrupt` is caught, any remaining actions are returned immediately without waiting for completion. :param iterable objects: an iterable of resource objects that have ``fetch_last_action`` methods :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any actions have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator of objects :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded
[ "..", "versionadded", "::", "0", ".", "2", ".", "0" ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L524-L560
jwodder/doapi
doapi/doapi.py
doapi.create_ssh_key
def create_ssh_key(self, name, public_key, **kwargs):
    """
    Register a new SSH public key resource with the account.

    :param str name: the name to give the new SSH key resource
    :param str public_key: the public key text to register, in
        :file:`authorized_keys` format
    :param kwargs: additional fields to include in the API request
    :return: the new SSH key resource
    :rtype: SSHKey
    :raises DOAPIError: if the API endpoint replies with an error
    """
    payload = {"name": name, "public_key": public_key}
    # Extra fields may override the positional ones, matching the API's
    # permissive request format.
    payload.update(kwargs)
    response = self.request('/v2/account/keys', method='POST', data=payload)
    return self._ssh_key(response["ssh_key"])
python
def create_ssh_key(self, name, public_key, **kwargs): """ Add a new SSH public key resource to the account :param str name: the name to give the new SSH key resource :param str public_key: the text of the public key to register, in the form used by :file:`authorized_keys` files :param kwargs: additional fields to include in the API request :return: the new SSH key resource :rtype: SSHKey :raises DOAPIError: if the API endpoint replies with an error """ data = {"name": name, "public_key": public_key} data.update(kwargs) return self._ssh_key(self.request('/v2/account/keys', method='POST', data=data)["ssh_key"])
[ "def", "create_ssh_key", "(", "self", ",", "name", ",", "public_key", ",", "*", "*", "kwargs", ")", ":", "data", "=", "{", "\"name\"", ":", "name", ",", "\"public_key\"", ":", "public_key", "}", "data", ".", "update", "(", "kwargs", ")", "return", "sel...
Add a new SSH public key resource to the account :param str name: the name to give the new SSH key resource :param str public_key: the text of the public key to register, in the form used by :file:`authorized_keys` files :param kwargs: additional fields to include in the API request :return: the new SSH key resource :rtype: SSHKey :raises DOAPIError: if the API endpoint replies with an error
[ "Add", "a", "new", "SSH", "public", "key", "resource", "to", "the", "account" ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L599-L614
jwodder/doapi
doapi/doapi.py
doapi.fetch_all_images
def fetch_all_images(self, type=None, private=None): # pylint: disable=redefined-builtin
    r"""
    Return a generator over every image available to the account.

    :param type: the category of images to fetch: ``"distribution"``,
        ``"application"``, or all (`None`); default: `None`
    :type type: string or None
    :param bool private: when true, restrict the listing to the user's
        private images; default: return all images
    :rtype: generator of `Image`\ s
    :raises DOAPIError: if the API endpoint replies with an error
    """
    query = {}
    if type is not None:
        query["type"] = type
    if private is not None:
        # The API expects a lowercase string rather than a JSON boolean.
        query["private"] = 'true' if private else 'false'
    pages = self.paginate('/v2/images', 'images', params=query)
    return map(self._image, pages)
python
def fetch_all_images(self, type=None, private=None): # pylint: disable=redefined-builtin r""" Returns a generator that yields all of the images available to the account :param type: the type of images to fetch: ``"distribution"``, ``"application"``, or all (`None`); default: `None` :type type: string or None :param bool private: whether to only return the user's private images; default: return all images :rtype: generator of `Image`\ s :raises DOAPIError: if the API endpoint replies with an error """ params = {} if type is not None: params["type"] = type if private is not None: params["private"] = 'true' if private else 'false' return map(self._image, self.paginate('/v2/images', 'images', params=params))
[ "def", "fetch_all_images", "(", "self", ",", "type", "=", "None", ",", "private", "=", "None", ")", ":", "# pylint: disable=redefined-builtin", "params", "=", "{", "}", "if", "type", "is", "not", "None", ":", "params", "[", "\"type\"", "]", "=", "type", ...
r""" Returns a generator that yields all of the images available to the account :param type: the type of images to fetch: ``"distribution"``, ``"application"``, or all (`None`); default: `None` :type type: string or None :param bool private: whether to only return the user's private images; default: return all images :rtype: generator of `Image`\ s :raises DOAPIError: if the API endpoint replies with an error
[ "r", "Returns", "a", "generator", "that", "yields", "all", "of", "the", "images", "available", "to", "the", "account" ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L651-L671
jwodder/doapi
doapi/doapi.py
doapi.create_domain
def create_domain(self, name, ip_address, **kwargs):
    """
    Add a new domain name resource to the account.

    Note that this does not register a new domain name; it only configures
    DigitalOcean's nameservers to provide DNS resolution for the domain.
    See `How To Set Up a Host Name with DigitalOcean
    <https://www.digitalocean.com/community/tutorials/how-to-set-up-a-host-name-with-digitalocean>`_
    for more information.

    :param str name: the domain name to add
    :param ip_address: the IP address to which the domain should point
    :type ip_address: string or `FloatingIP`
    :param kwargs: additional fields to include in the API request
    :return: the new domain resource
    :rtype: Domain
    :raises DOAPIError: if the API endpoint replies with an error
    """
    # Accept a FloatingIP object in place of a raw address string.
    target_ip = ip_address.ip if isinstance(ip_address, FloatingIP) \
                              else ip_address
    payload = {"name": name, "ip_address": target_ip}
    payload.update(kwargs)
    response = self.request('/v2/domains', method='POST', data=payload)
    return self._domain(response["domain"])
python
def create_domain(self, name, ip_address, **kwargs): """ Add a new domain name resource to the account. Note that this method does not actually register a new domain name; it merely configures DigitalOcean's nameservers to provide DNS resolution for the domain. See `How To Set Up a Host Name with DigitalOcean <https://www.digitalocean.com/community/tutorials/how-to-set-up-a-host-name-with-digitalocean>`_ for more information. :param str name: the domain name to add :param ip_address: the IP address to which the domain should point :type ip_address: string or `FloatingIP` :param kwargs: additional fields to include in the API request :return: the new domain resource :rtype: Domain :raises DOAPIError: if the API endpoint replies with an error """ if isinstance(ip_address, FloatingIP): ip_address = ip_address.ip data = {"name": name, "ip_address": ip_address} data.update(kwargs) return self._domain(self.request('/v2/domains', method='POST', data=data)["domain"])
[ "def", "create_domain", "(", "self", ",", "name", ",", "ip_address", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "ip_address", ",", "FloatingIP", ")", ":", "ip_address", "=", "ip_address", ".", "ip", "data", "=", "{", "\"name\"", ":", "...
Add a new domain name resource to the account. Note that this method does not actually register a new domain name; it merely configures DigitalOcean's nameservers to provide DNS resolution for the domain. See `How To Set Up a Host Name with DigitalOcean <https://www.digitalocean.com/community/tutorials/how-to-set-up-a-host-name-with-digitalocean>`_ for more information. :param str name: the domain name to add :param ip_address: the IP address to which the domain should point :type ip_address: string or `FloatingIP` :param kwargs: additional fields to include in the API request :return: the new domain resource :rtype: Domain :raises DOAPIError: if the API endpoint replies with an error
[ "Add", "a", "new", "domain", "name", "resource", "to", "the", "account", "." ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L793-L816
jwodder/doapi
doapi/doapi.py
doapi.create_floating_ip
def create_floating_ip(self, droplet_id=None, region=None, **kwargs):
    """
    Create a new floating IP, either assigned to a droplet or reserved to a
    region.  Exactly one of ``droplet_id`` and ``region`` must be given.

    The returned `FloatingIP` reflects the IP at the moment of creation; if
    it is meant to be assigned to a droplet, the assignment may not have
    finished by the time the object is returned.  Use the `FloatingIP`'s
    :meth:`~FloatingIP.wait_for_action` method to wait for completion.

    :param droplet_id: the droplet to assign the floating IP to, as either
        an ID or a `Droplet` object
    :type droplet_id: integer or `Droplet`
    :param region: the region to reserve the floating IP to, as either a
        slug or a `Region` object
    :type region: string or `Region`
    :param kwargs: additional fields to include in the API request
    :return: the new floating IP
    :rtype: FloatingIP
    :raises TypeError: if both ``droplet_id`` & ``region`` or neither of
        them are defined
    :raises DOAPIError: if the API endpoint replies with an error
    """
    if (droplet_id is None) == (region is None):
        ### TODO: Is TypeError the right type of error?
        raise TypeError('Exactly one of "droplet_id" and "region" must be'
                        ' specified')
    if droplet_id is None:
        # Reserve the IP to a region.
        if isinstance(region, Region):
            region = region.slug
        payload = {"region": region}
    else:
        # Assign the IP to a droplet.
        if isinstance(droplet_id, Droplet):
            droplet_id = droplet_id.id
        payload = {"droplet_id": droplet_id}
    payload.update(kwargs)
    response = self.request('/v2/floating_ips', method='POST', data=payload)
    return self._floating_ip(response["floating_ip"])
python
def create_floating_ip(self, droplet_id=None, region=None, **kwargs): """ Create a new floating IP assigned to a droplet or reserved to a region. Either ``droplet_id`` or ``region`` must be specified, but not both. The returned `FloatingIP` object will represent the IP at the moment of creation; if the IP address is supposed to be assigned to a droplet, the assignment may not have been completed at the time the object is returned. To wait for the assignment to complete, use the `FloatingIP`'s :meth:`~FloatingIP.wait_for_action` method. :param droplet_id: the droplet to assign the floating IP to as either an ID or a `Droplet` object :type droplet_id: integer or `Droplet` :param region: the region to reserve the floating IP to as either a slug or a `Region` object :type region: string or `Region` :param kwargs: additional fields to include in the API request :return: the new floating IP :rtype: FloatingIP :raises TypeError: if both ``droplet_id`` & ``region`` or neither of them are defined :raises DOAPIError: if the API endpoint replies with an error """ if (droplet_id is None) == (region is None): ### TODO: Is TypeError the right type of error? raise TypeError('Exactly one of "droplet_id" and "region" must be' ' specified') if droplet_id is not None: if isinstance(droplet_id, Droplet): droplet_id = droplet_id.id data = {"droplet_id": droplet_id} else: if isinstance(region, Region): region = region.slug data = {"region": region} data.update(kwargs) return self._floating_ip(self.request('/v2/floating_ips', method='POST', data=data)["floating_ip"])
[ "def", "create_floating_ip", "(", "self", ",", "droplet_id", "=", "None", ",", "region", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "(", "droplet_id", "is", "None", ")", "==", "(", "region", "is", "None", ")", ":", "### TODO: Is TypeError the ...
Create a new floating IP assigned to a droplet or reserved to a region. Either ``droplet_id`` or ``region`` must be specified, but not both. The returned `FloatingIP` object will represent the IP at the moment of creation; if the IP address is supposed to be assigned to a droplet, the assignment may not have been completed at the time the object is returned. To wait for the assignment to complete, use the `FloatingIP`'s :meth:`~FloatingIP.wait_for_action` method. :param droplet_id: the droplet to assign the floating IP to as either an ID or a `Droplet` object :type droplet_id: integer or `Droplet` :param region: the region to reserve the floating IP to as either a slug or a `Region` object :type region: string or `Region` :param kwargs: additional fields to include in the API request :return: the new floating IP :rtype: FloatingIP :raises TypeError: if both ``droplet_id`` & ``region`` or neither of them are defined :raises DOAPIError: if the API endpoint replies with an error
[ "Create", "a", "new", "floating", "IP", "assigned", "to", "a", "droplet", "or", "reserved", "to", "a", "region", ".", "Either", "droplet_id", "or", "region", "must", "be", "specified", "but", "not", "both", "." ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L856-L894
jwodder/doapi
doapi/doapi.py
doapi.create_tag
def create_tag(self, name):
    """
    .. versionadded:: 0.2.0

    Register a new tag resource with the account.

    :param str name: the name of the new tag
    :rtype: Tag
    :raises DOAPIError: if the API endpoint replies with an error
    """
    response = self.request('/v2/tags', method='POST', data={"name": name})
    return self._tag(response["tag"])
python
def create_tag(self, name): """ .. versionadded:: 0.2.0 Add a new tag resource to the account :param str name: the name of the new tag :rtype: Tag :raises DOAPIError: if the API endpoint replies with an error """ return self._tag(self.request('/v2/tags', method='POST', data={ "name": name, })["tag"])
[ "def", "create_tag", "(", "self", ",", "name", ")", ":", "return", "self", ".", "_tag", "(", "self", ".", "request", "(", "'/v2/tags'", ",", "method", "=", "'POST'", ",", "data", "=", "{", "\"name\"", ":", "name", ",", "}", ")", "[", "\"tag\"", "]"...
.. versionadded:: 0.2.0 Add a new tag resource to the account :param str name: the name of the new tag :rtype: Tag :raises DOAPIError: if the API endpoint replies with an error
[ "..", "versionadded", "::", "0", ".", "2", ".", "0" ]
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L935-L947
jwodder/doapi
doapi/doapi.py
doapi._wait
def _wait(self, objects, attr, value, wait_interval=None, wait_time=None): r""" Calls the ``fetch`` method of each object in ``objects`` periodically until the ``attr`` attribute of each one equals ``value``, yielding the final state of each object as soon as it satisfies the condition. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress objects) is raised. If a `KeyboardInterrupt` is caught, any remaining objects are returned immediately without waiting for completion. .. versionchanged:: 0.2.0 Raises `WaitTimeoutError` on timeout :param iterable objects: an iterable of `Resource`\ s with ``fetch`` methods :param string attr: the attribute to watch :param value: the value of ``attr`` to wait for :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any objects have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded """ objects = list(objects) if not objects: return if wait_interval is None: wait_interval = self.wait_interval if wait_time < 0: end_time = None else: if wait_time is None: wait_time = self.wait_time if wait_time is None or wait_time < 0: end_time = None else: end_time = time() + wait_time while end_time is None or time() < end_time: loop_start = time() next_objs = [] for o in objects: obj = o.fetch() if getattr(obj, attr, None) == value: yield obj else: next_objs.append(obj) objects = next_objs if not objects: break loop_end = time() time_left = wait_interval - (loop_end - loop_start) if end_time is not None: time_left = min(time_left, end_time - loop_end) if time_left > 0: try: sleep(time_left) except KeyboardInterrupt: for o in objects: 
yield o return if objects: raise WaitTimeoutError(objects, attr, value, wait_interval, wait_time)
python
def _wait(self, objects, attr, value, wait_interval=None, wait_time=None): r""" Calls the ``fetch`` method of each object in ``objects`` periodically until the ``attr`` attribute of each one equals ``value``, yielding the final state of each object as soon as it satisfies the condition. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress objects) is raised. If a `KeyboardInterrupt` is caught, any remaining objects are returned immediately without waiting for completion. .. versionchanged:: 0.2.0 Raises `WaitTimeoutError` on timeout :param iterable objects: an iterable of `Resource`\ s with ``fetch`` methods :param string attr: the attribute to watch :param value: the value of ``attr`` to wait for :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any objects have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded """ objects = list(objects) if not objects: return if wait_interval is None: wait_interval = self.wait_interval if wait_time < 0: end_time = None else: if wait_time is None: wait_time = self.wait_time if wait_time is None or wait_time < 0: end_time = None else: end_time = time() + wait_time while end_time is None or time() < end_time: loop_start = time() next_objs = [] for o in objects: obj = o.fetch() if getattr(obj, attr, None) == value: yield obj else: next_objs.append(obj) objects = next_objs if not objects: break loop_end = time() time_left = wait_interval - (loop_end - loop_start) if end_time is not None: time_left = min(time_left, end_time - loop_end) if time_left > 0: try: sleep(time_left) except KeyboardInterrupt: for o in objects: 
yield o return if objects: raise WaitTimeoutError(objects, attr, value, wait_interval, wait_time)
[ "def", "_wait", "(", "self", ",", "objects", ",", "attr", ",", "value", ",", "wait_interval", "=", "None", ",", "wait_time", "=", "None", ")", ":", "objects", "=", "list", "(", "objects", ")", "if", "not", "objects", ":", "return", "if", "wait_interval...
r""" Calls the ``fetch`` method of each object in ``objects`` periodically until the ``attr`` attribute of each one equals ``value``, yielding the final state of each object as soon as it satisfies the condition. If ``wait_time`` is exceeded, a `WaitTimeoutError` (containing any remaining in-progress objects) is raised. If a `KeyboardInterrupt` is caught, any remaining objects are returned immediately without waiting for completion. .. versionchanged:: 0.2.0 Raises `WaitTimeoutError` on timeout :param iterable objects: an iterable of `Resource`\ s with ``fetch`` methods :param string attr: the attribute to watch :param value: the value of ``attr`` to wait for :param number wait_interval: how many seconds to sleep between requests; defaults to :attr:`wait_interval` if not specified or `None` :param number wait_time: the total number of seconds after which the method will raise an error if any objects have not yet completed, or a negative number to wait indefinitely; defaults to :attr:`wait_time` if not specified or `None` :rtype: generator :raises DOAPIError: if the API endpoint replies with an error :raises WaitTimeoutError: if ``wait_time`` is exceeded
[ "r", "Calls", "the", "fetch", "method", "of", "each", "object", "in", "objects", "periodically", "until", "the", "attr", "attribute", "of", "each", "one", "equals", "value", "yielding", "the", "final", "state", "of", "each", "object", "as", "soon", "as", "...
train
https://github.com/jwodder/doapi/blob/b1306de86a01d8ae7b9c1fe2699765bb82e4f310/doapi/doapi.py#L955-L1025
zenotech/MyCluster
mycluster/sge.py
min_tasks_per_node
def min_tasks_per_node(queue_id):
    """
    Return the minimum number of tasks per node for ``queue_id``.

    This is used when requesting non-exclusive use, as the parallel
    environment may enforce a minimum number of tasks through an integer
    ``allocation_rule`` in its ``qconf -sp`` output.

    :param str queue_id: identifier of the form
        ``"<parallel_env>:<queue_name>"`` (the queue part is optional here,
        since only the parallel environment is consulted)
    :return: the larger of 1 and the PE's integer allocation rule
    :rtype: int
    """
    parallel_env = queue_id.split(':')[0]
    tasks = 1
    pe_tasks = tasks
    with os.popen('qconf -sp ' + parallel_env) as f:
        for line in f:
            if line.split(' ')[0] == 'allocation_rule':
                # The allocation rule may be a keyword such as
                # "$pe_slots" rather than an integer, so parse it
                # best-effort and keep the default on failure.  (The
                # original used a bare ``except:`` around the whole loop,
                # which also swallowed unrelated iteration errors.)
                try:
                    pe_tasks = int(re.split(r'\W+', line)[1])
                except (ValueError, IndexError):
                    pass
    return max(tasks, pe_tasks)
python
def min_tasks_per_node(queue_id): """ This function is used when requesting non exclusive use as the parallel environment might enforce a minimum number of tasks """ parallel_env = queue_id.split(':')[0] queue_name = queue_id.split(':')[1] tasks = 1 pe_tasks = tasks with os.popen('qconf -sp ' + parallel_env) as f: try: for line in f: if line.split(' ')[0] == 'allocation_rule': # This may throw exception as allocation rule # may not always be an integer pe_tasks = int(re.split('\W+', line)[1]) except: pass return max(tasks, pe_tasks)
[ "def", "min_tasks_per_node", "(", "queue_id", ")", ":", "parallel_env", "=", "queue_id", ".", "split", "(", "':'", ")", "[", "0", "]", "queue_name", "=", "queue_id", ".", "split", "(", "':'", ")", "[", "1", "]", "tasks", "=", "1", "pe_tasks", "=", "t...
This function is used when requesting non exclusive use as the parallel environment might enforce a minimum number of tasks
[ "This", "function", "is", "used", "when", "requesting", "non", "exclusive", "use", "as", "the", "parallel", "environment", "might", "enforce", "a", "minimum", "number", "of", "tasks" ]
train
https://github.com/zenotech/MyCluster/blob/d2b7e35c57a515926e83bbc083d26930cd67e1bd/mycluster/sge.py#L124-L144
NeuralEnsemble/lazyarray
lazyarray.py
check_shape
def check_shape(meth):
    """
    Decorator for larray magic methods that verifies an array-like operand
    has the same shape as the array before delegating to ``meth``.
    """
    @wraps(meth)
    def checked(self, operand):
        # Only operands that carry a shape are checked; scalars pass
        # straight through.
        is_arraylike = isinstance(operand, (larray, numpy.ndarray))
        if is_arraylike and operand.shape != self._shape:
            raise ValueError("shape mismatch: objects cannot be broadcast to a single shape")
        return meth(self, operand)
    return checked
python
def check_shape(meth): """ Decorator for larray magic methods, to ensure that the operand has the same shape as the array. """ @wraps(meth) def wrapped_meth(self, val): if isinstance(val, (larray, numpy.ndarray)): if val.shape != self._shape: raise ValueError("shape mismatch: objects cannot be broadcast to a single shape") return meth(self, val) return wrapped_meth
[ "def", "check_shape", "(", "meth", ")", ":", "@", "wraps", "(", "meth", ")", "def", "wrapped_meth", "(", "self", ",", "val", ")", ":", "if", "isinstance", "(", "val", ",", "(", "larray", ",", "numpy", ".", "ndarray", ")", ")", ":", "if", "val", "...
Decorator for larray magic methods, to ensure that the operand has the same shape as the array.
[ "Decorator", "for", "larray", "magic", "methods", "to", "ensure", "that", "the", "operand", "has", "the", "same", "shape", "as", "the", "array", "." ]
train
https://github.com/NeuralEnsemble/lazyarray/blob/391a4cef3be85309c36adac0c17824de3d82f5be/lazyarray.py#L47-L58
NeuralEnsemble/lazyarray
lazyarray.py
partial_shape
def partial_shape(addr, full_shape):
    """
    Calculate the size of the sub-array represented by `addr`

    `addr` is an index expression (normalised via `full_address`) and
    `full_shape` is the shape of the array being indexed.  Returns a tuple
    giving the shape of the selected sub-array; dimensions indexed by a
    plain integer are dropped from the result.
    """
    def size(x, max):
        # Extent of one dimension under index ``x`` whose full length is
        # ``max``; None means the dimension is consumed by an integer index
        # and disappears from the result shape.
        if isinstance(x, (int, long, numpy.integer)):
            return None
        elif isinstance(x, slice):
            y = min(max, x.stop or max)  # slice limits can go past the bounds
            return 1 + (y - (x.start or 0) - 1) // (x.step or 1)
        elif isinstance(x, collections.Sized):
            if hasattr(x, 'dtype') and x.dtype == bool:
                # boolean mask: result length is the number of True entries
                return x.sum()
            else:
                return len(x)
        else:
            raise TypeError("Unsupported index type %s" % type(x))
    addr = full_address(addr, full_shape)
    if isinstance(addr, numpy.ndarray) and addr.dtype == bool:
        # a whole-array boolean mask selects a flat 1-D result
        return (addr.sum(),)
    elif all(isinstance(x, collections.Sized) for x in addr):
        # fancy indexing with a sequence in every dimension gives a 1-D
        # result; presumably all sequences have equal length — the first
        # one's length is used (TODO confirm against callers)
        return (len(addr[0]),)
    else:
        shape = [size(x, max) for (x, max) in zip(addr, full_shape)]
        return tuple([x for x in shape if x is not None])
python
def partial_shape(addr, full_shape): """ Calculate the size of the sub-array represented by `addr` """ def size(x, max): if isinstance(x, (int, long, numpy.integer)): return None elif isinstance(x, slice): y = min(max, x.stop or max) # slice limits can go past the bounds return 1 + (y - (x.start or 0) - 1) // (x.step or 1) elif isinstance(x, collections.Sized): if hasattr(x, 'dtype') and x.dtype == bool: return x.sum() else: return len(x) else: raise TypeError("Unsupported index type %s" % type(x)) addr = full_address(addr, full_shape) if isinstance(addr, numpy.ndarray) and addr.dtype == bool: return (addr.sum(),) elif all(isinstance(x, collections.Sized) for x in addr): return (len(addr[0]),) else: shape = [size(x, max) for (x, max) in zip(addr, full_shape)] return tuple([x for x in shape if x is not None])
[ "def", "partial_shape", "(", "addr", ",", "full_shape", ")", ":", "def", "size", "(", "x", ",", "max", ")", ":", "if", "isinstance", "(", "x", ",", "(", "int", ",", "long", ",", "numpy", ".", "integer", ")", ")", ":", "return", "None", "elif", "i...
Calculate the size of the sub-array represented by `addr`
[ "Calculate", "the", "size", "of", "the", "sub", "-", "array", "represented", "by", "addr" ]
train
https://github.com/NeuralEnsemble/lazyarray/blob/391a4cef3be85309c36adac0c17824de3d82f5be/lazyarray.py#L82-L107
NeuralEnsemble/lazyarray
lazyarray.py
reverse
def reverse(func): """Given a function f(a, b), returns f(b, a)""" @wraps(func) def reversed_func(a, b): return func(b, a) reversed_func.__doc__ = "Reversed argument form of %s" % func.__doc__ reversed_func.__name__ = "reversed %s" % func.__name__ return reversed_func
python
def reverse(func): """Given a function f(a, b), returns f(b, a)""" @wraps(func) def reversed_func(a, b): return func(b, a) reversed_func.__doc__ = "Reversed argument form of %s" % func.__doc__ reversed_func.__name__ = "reversed %s" % func.__name__ return reversed_func
[ "def", "reverse", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "reversed_func", "(", "a", ",", "b", ")", ":", "return", "func", "(", "b", ",", "a", ")", "reversed_func", ".", "__doc__", "=", "\"Reversed argument form of %s\"", "%", "fu...
Given a function f(a, b), returns f(b, a)
[ "Given", "a", "function", "f", "(", "a", "b", ")", "returns", "f", "(", "b", "a", ")" ]
train
https://github.com/NeuralEnsemble/lazyarray/blob/391a4cef3be85309c36adac0c17824de3d82f5be/lazyarray.py#L110-L117
NeuralEnsemble/lazyarray
lazyarray.py
_build_ufunc
def _build_ufunc(func): """Return a ufunc that works with lazy arrays""" def larray_compatible_ufunc(x): if isinstance(x, larray): y = deepcopy(x) y.apply(func) return y else: return func(x) return larray_compatible_ufunc
python
def _build_ufunc(func): """Return a ufunc that works with lazy arrays""" def larray_compatible_ufunc(x): if isinstance(x, larray): y = deepcopy(x) y.apply(func) return y else: return func(x) return larray_compatible_ufunc
[ "def", "_build_ufunc", "(", "func", ")", ":", "def", "larray_compatible_ufunc", "(", "x", ")", ":", "if", "isinstance", "(", "x", ",", "larray", ")", ":", "y", "=", "deepcopy", "(", "x", ")", "y", ".", "apply", "(", "func", ")", "return", "y", "els...
Return a ufunc that works with lazy arrays
[ "Return", "a", "ufunc", "that", "works", "with", "lazy", "arrays" ]
train
https://github.com/NeuralEnsemble/lazyarray/blob/391a4cef3be85309c36adac0c17824de3d82f5be/lazyarray.py#L540-L549
NeuralEnsemble/lazyarray
lazyarray.py
larray.is_homogeneous
def is_homogeneous(self): """True if all the elements of the array are the same.""" hom_base = isinstance(self.base_value, (int, long, numpy.integer, float, bool)) \ or type(self.base_value) == self.dtype \ or (isinstance(self.dtype, type) and isinstance(self.base_value, self.dtype)) hom_ops = all(obj.is_homogeneous for f, obj in self.operations if isinstance(obj, larray)) return hom_base and hom_ops
python
def is_homogeneous(self): """True if all the elements of the array are the same.""" hom_base = isinstance(self.base_value, (int, long, numpy.integer, float, bool)) \ or type(self.base_value) == self.dtype \ or (isinstance(self.dtype, type) and isinstance(self.base_value, self.dtype)) hom_ops = all(obj.is_homogeneous for f, obj in self.operations if isinstance(obj, larray)) return hom_base and hom_ops
[ "def", "is_homogeneous", "(", "self", ")", ":", "hom_base", "=", "isinstance", "(", "self", ".", "base_value", ",", "(", "int", ",", "long", ",", "numpy", ".", "integer", ",", "float", ",", "bool", ")", ")", "or", "type", "(", "self", ".", "base_valu...
True if all the elements of the array are the same.
[ "True", "if", "all", "the", "elements", "of", "the", "array", "are", "the", "same", "." ]
train
https://github.com/NeuralEnsemble/lazyarray/blob/391a4cef3be85309c36adac0c17824de3d82f5be/lazyarray.py#L275-L281
NeuralEnsemble/lazyarray
lazyarray.py
larray._partially_evaluate
def _partially_evaluate(self, addr, simplify=False): """ Return part of the lazy array. """ if self.is_homogeneous: if simplify: base_val = self.base_value else: base_val = self._homogeneous_array(addr) * self.base_value elif isinstance(self.base_value, (int, long, numpy.integer, float, bool)): base_val = self._homogeneous_array(addr) * self.base_value elif isinstance(self.base_value, numpy.ndarray): base_val = self.base_value[addr] elif have_scipy and sparse.issparse(self.base_value): # For sparse matrices larr[2, :] base_val = self.base_value[addr] elif callable(self.base_value): indices = self._array_indices(addr) base_val = self.base_value(*indices) if isinstance(base_val, numpy.ndarray) and base_val.shape == (1,): base_val = base_val[0] elif hasattr(self.base_value, "lazily_evaluate"): base_val = self.base_value.lazily_evaluate(addr, shape=self._shape) elif isinstance(self.base_value, VectorizedIterable): partial_shape = self._partial_shape(addr) if partial_shape: n = reduce(operator.mul, partial_shape) else: n = 1 base_val = self.base_value.next(n) # note that the array contents will depend on the order of access to elements if n == 1: base_val = base_val[0] elif partial_shape and base_val.shape != partial_shape: base_val = base_val.reshape(partial_shape) elif isinstance(self.base_value, collections.Iterator): raise NotImplementedError("coming soon...") else: raise ValueError("invalid base value for array (%s)" % self.base_value) return self._apply_operations(base_val, addr, simplify=simplify)
python
def _partially_evaluate(self, addr, simplify=False): """ Return part of the lazy array. """ if self.is_homogeneous: if simplify: base_val = self.base_value else: base_val = self._homogeneous_array(addr) * self.base_value elif isinstance(self.base_value, (int, long, numpy.integer, float, bool)): base_val = self._homogeneous_array(addr) * self.base_value elif isinstance(self.base_value, numpy.ndarray): base_val = self.base_value[addr] elif have_scipy and sparse.issparse(self.base_value): # For sparse matrices larr[2, :] base_val = self.base_value[addr] elif callable(self.base_value): indices = self._array_indices(addr) base_val = self.base_value(*indices) if isinstance(base_val, numpy.ndarray) and base_val.shape == (1,): base_val = base_val[0] elif hasattr(self.base_value, "lazily_evaluate"): base_val = self.base_value.lazily_evaluate(addr, shape=self._shape) elif isinstance(self.base_value, VectorizedIterable): partial_shape = self._partial_shape(addr) if partial_shape: n = reduce(operator.mul, partial_shape) else: n = 1 base_val = self.base_value.next(n) # note that the array contents will depend on the order of access to elements if n == 1: base_val = base_val[0] elif partial_shape and base_val.shape != partial_shape: base_val = base_val.reshape(partial_shape) elif isinstance(self.base_value, collections.Iterator): raise NotImplementedError("coming soon...") else: raise ValueError("invalid base value for array (%s)" % self.base_value) return self._apply_operations(base_val, addr, simplify=simplify)
[ "def", "_partially_evaluate", "(", "self", ",", "addr", ",", "simplify", "=", "False", ")", ":", "if", "self", ".", "is_homogeneous", ":", "if", "simplify", ":", "base_val", "=", "self", ".", "base_value", "else", ":", "base_val", "=", "self", ".", "_hom...
Return part of the lazy array.
[ "Return", "part", "of", "the", "lazy", "array", "." ]
train
https://github.com/NeuralEnsemble/lazyarray/blob/391a4cef3be85309c36adac0c17824de3d82f5be/lazyarray.py#L347-L384
NeuralEnsemble/lazyarray
lazyarray.py
larray.check_bounds
def check_bounds(self, addr): """ Check whether the given address is within the array bounds. """ def is_boolean_array(arr): return hasattr(arr, 'dtype') and arr.dtype == bool def check_axis(x, size): if isinstance(x, (int, long, numpy.integer)): lower = upper = x elif isinstance(x, slice): lower = x.start or 0 upper = min(x.stop or size - 1, size - 1) # slices are allowed to go past the bounds elif isinstance(x, collections.Sized): if is_boolean_array(x): lower = 0 upper = x.size - 1 else: if len(x) == 0: raise ValueError("Empty address component (address was %s)" % str(addr)) if hasattr(x, "min"): lower = x.min() else: lower = min(x) if hasattr(x, "max"): upper = x.max() else: upper = max(x) else: raise TypeError("Invalid array address: %s (element of type %s)" % (str(addr), type(x))) if (lower < -size) or (upper >= size): raise IndexError("Index out of bounds") full_addr = self._full_address(addr) if isinstance(addr, numpy.ndarray) and addr.dtype == bool: if len(addr.shape) > len(self._shape): raise IndexError("Too many indices for array") for xmax, size in zip(addr.shape, self._shape): upper = xmax - 1 if upper >= size: raise IndexError("Index out of bounds") else: for i, size in zip(full_addr, self._shape): check_axis(i, size)
python
def check_bounds(self, addr): """ Check whether the given address is within the array bounds. """ def is_boolean_array(arr): return hasattr(arr, 'dtype') and arr.dtype == bool def check_axis(x, size): if isinstance(x, (int, long, numpy.integer)): lower = upper = x elif isinstance(x, slice): lower = x.start or 0 upper = min(x.stop or size - 1, size - 1) # slices are allowed to go past the bounds elif isinstance(x, collections.Sized): if is_boolean_array(x): lower = 0 upper = x.size - 1 else: if len(x) == 0: raise ValueError("Empty address component (address was %s)" % str(addr)) if hasattr(x, "min"): lower = x.min() else: lower = min(x) if hasattr(x, "max"): upper = x.max() else: upper = max(x) else: raise TypeError("Invalid array address: %s (element of type %s)" % (str(addr), type(x))) if (lower < -size) or (upper >= size): raise IndexError("Index out of bounds") full_addr = self._full_address(addr) if isinstance(addr, numpy.ndarray) and addr.dtype == bool: if len(addr.shape) > len(self._shape): raise IndexError("Too many indices for array") for xmax, size in zip(addr.shape, self._shape): upper = xmax - 1 if upper >= size: raise IndexError("Index out of bounds") else: for i, size in zip(full_addr, self._shape): check_axis(i, size)
[ "def", "check_bounds", "(", "self", ",", "addr", ")", ":", "def", "is_boolean_array", "(", "arr", ")", ":", "return", "hasattr", "(", "arr", ",", "'dtype'", ")", "and", "arr", ".", "dtype", "==", "bool", "def", "check_axis", "(", "x", ",", "size", ")...
Check whether the given address is within the array bounds.
[ "Check", "whether", "the", "given", "address", "is", "within", "the", "array", "bounds", "." ]
train
https://github.com/NeuralEnsemble/lazyarray/blob/391a4cef3be85309c36adac0c17824de3d82f5be/lazyarray.py#L387-L429
NeuralEnsemble/lazyarray
lazyarray.py
larray.evaluate
def evaluate(self, simplify=False, empty_val=0): """ Return the lazy array as a real NumPy array. If the array is homogeneous and ``simplify`` is ``True``, return a single numerical value. """ # need to catch the situation where a generator-based larray is evaluated a second time if self.is_homogeneous: if simplify: x = self.base_value else: x = self.base_value * numpy.ones(self._shape, dtype=self.dtype) elif isinstance(self.base_value, (int, long, numpy.integer, float, bool, numpy.bool_)): x = self.base_value * numpy.ones(self._shape, dtype=self.dtype) elif isinstance(self.base_value, numpy.ndarray): x = self.base_value elif callable(self.base_value): x = numpy.array(numpy.fromfunction(self.base_value, shape=self._shape, dtype=int), dtype=self.dtype) elif hasattr(self.base_value, "lazily_evaluate"): x = self.base_value.lazily_evaluate(shape=self._shape) elif isinstance(self.base_value, VectorizedIterable): x = self.base_value.next(self.size) if x.shape != self._shape: x = x.reshape(self._shape) elif have_scipy and sparse.issparse(self.base_value): # For sparse matrices if empty_val!=0: x = self.base_value.toarray((sparse.csc_matrix)) x = numpy.where(x, x, numpy.nan) else: x = self.base_value.toarray((sparse.csc_matrix)) elif isinstance(self.base_value, collections.Iterator): x = numpy.fromiter(self.base_value, dtype=self.dtype or float, count=self.size) if x.shape != self._shape: x = x.reshape(self._shape) else: raise ValueError("invalid base value for array") return self._apply_operations(x, simplify=simplify)
python
def evaluate(self, simplify=False, empty_val=0): """ Return the lazy array as a real NumPy array. If the array is homogeneous and ``simplify`` is ``True``, return a single numerical value. """ # need to catch the situation where a generator-based larray is evaluated a second time if self.is_homogeneous: if simplify: x = self.base_value else: x = self.base_value * numpy.ones(self._shape, dtype=self.dtype) elif isinstance(self.base_value, (int, long, numpy.integer, float, bool, numpy.bool_)): x = self.base_value * numpy.ones(self._shape, dtype=self.dtype) elif isinstance(self.base_value, numpy.ndarray): x = self.base_value elif callable(self.base_value): x = numpy.array(numpy.fromfunction(self.base_value, shape=self._shape, dtype=int), dtype=self.dtype) elif hasattr(self.base_value, "lazily_evaluate"): x = self.base_value.lazily_evaluate(shape=self._shape) elif isinstance(self.base_value, VectorizedIterable): x = self.base_value.next(self.size) if x.shape != self._shape: x = x.reshape(self._shape) elif have_scipy and sparse.issparse(self.base_value): # For sparse matrices if empty_val!=0: x = self.base_value.toarray((sparse.csc_matrix)) x = numpy.where(x, x, numpy.nan) else: x = self.base_value.toarray((sparse.csc_matrix)) elif isinstance(self.base_value, collections.Iterator): x = numpy.fromiter(self.base_value, dtype=self.dtype or float, count=self.size) if x.shape != self._shape: x = x.reshape(self._shape) else: raise ValueError("invalid base value for array") return self._apply_operations(x, simplify=simplify)
[ "def", "evaluate", "(", "self", ",", "simplify", "=", "False", ",", "empty_val", "=", "0", ")", ":", "# need to catch the situation where a generator-based larray is evaluated a second time", "if", "self", ".", "is_homogeneous", ":", "if", "simplify", ":", "x", "=", ...
Return the lazy array as a real NumPy array. If the array is homogeneous and ``simplify`` is ``True``, return a single numerical value.
[ "Return", "the", "lazy", "array", "as", "a", "real", "NumPy", "array", "." ]
train
https://github.com/NeuralEnsemble/lazyarray/blob/391a4cef3be85309c36adac0c17824de3d82f5be/lazyarray.py#L459-L496
miku/gluish
gluish/task.py
is_closest_date_parameter
def is_closest_date_parameter(task, param_name): """ Return the parameter class of param_name on task. """ for name, obj in task.get_params(): if name == param_name: return hasattr(obj, 'use_closest_date') return False
python
def is_closest_date_parameter(task, param_name): """ Return the parameter class of param_name on task. """ for name, obj in task.get_params(): if name == param_name: return hasattr(obj, 'use_closest_date') return False
[ "def", "is_closest_date_parameter", "(", "task", ",", "param_name", ")", ":", "for", "name", ",", "obj", "in", "task", ".", "get_params", "(", ")", ":", "if", "name", "==", "param_name", ":", "return", "hasattr", "(", "obj", ",", "'use_closest_date'", ")",...
Return the parameter class of param_name on task.
[ "Return", "the", "parameter", "class", "of", "param_name", "on", "task", "." ]
train
https://github.com/miku/gluish/blob/56d3ac4f41a944e31ecac0aa3b6d1dc2ce705e29/gluish/task.py#L46-L51
miku/gluish
gluish/task.py
delistify
def delistify(x): """ A basic slug version of a given parameter list. """ if isinstance(x, list): x = [e.replace("'", "") for e in x] return '-'.join(sorted(x)) return x
python
def delistify(x): """ A basic slug version of a given parameter list. """ if isinstance(x, list): x = [e.replace("'", "") for e in x] return '-'.join(sorted(x)) return x
[ "def", "delistify", "(", "x", ")", ":", "if", "isinstance", "(", "x", ",", "list", ")", ":", "x", "=", "[", "e", ".", "replace", "(", "\"'\"", ",", "\"\"", ")", "for", "e", "in", "x", "]", "return", "'-'", ".", "join", "(", "sorted", "(", "x"...
A basic slug version of a given parameter list.
[ "A", "basic", "slug", "version", "of", "a", "given", "parameter", "list", "." ]
train
https://github.com/miku/gluish/blob/56d3ac4f41a944e31ecac0aa3b6d1dc2ce705e29/gluish/task.py#L53-L58
miku/gluish
gluish/task.py
BaseTask.effective_task_id
def effective_task_id(self): """ Replace date in task id with closest date. """ params = self.param_kwargs if 'date' in params and is_closest_date_parameter(self, 'date'): params['date'] = self.closest() task_id_parts = sorted(['%s=%s' % (k, str(v)) for k, v in params.items()]) return '%s(%s)' % (self.task_family, ', '.join(task_id_parts)) else: return self.task_id
python
def effective_task_id(self): """ Replace date in task id with closest date. """ params = self.param_kwargs if 'date' in params and is_closest_date_parameter(self, 'date'): params['date'] = self.closest() task_id_parts = sorted(['%s=%s' % (k, str(v)) for k, v in params.items()]) return '%s(%s)' % (self.task_family, ', '.join(task_id_parts)) else: return self.task_id
[ "def", "effective_task_id", "(", "self", ")", ":", "params", "=", "self", ".", "param_kwargs", "if", "'date'", "in", "params", "and", "is_closest_date_parameter", "(", "self", ",", "'date'", ")", ":", "params", "[", "'date'", "]", "=", "self", ".", "closes...
Replace date in task id with closest date.
[ "Replace", "date", "in", "task", "id", "with", "closest", "date", "." ]
train
https://github.com/miku/gluish/blob/56d3ac4f41a944e31ecac0aa3b6d1dc2ce705e29/gluish/task.py#L75-L83
miku/gluish
gluish/task.py
BaseTask.taskdir
def taskdir(self): """ Return the directory under which all artefacts are stored. """ return os.path.join(self.BASE, self.TAG, self.task_family)
python
def taskdir(self): """ Return the directory under which all artefacts are stored. """ return os.path.join(self.BASE, self.TAG, self.task_family)
[ "def", "taskdir", "(", "self", ")", ":", "return", "os", ".", "path", ".", "join", "(", "self", ".", "BASE", ",", "self", ".", "TAG", ",", "self", ".", "task_family", ")" ]
Return the directory under which all artefacts are stored.
[ "Return", "the", "directory", "under", "which", "all", "artefacts", "are", "stored", "." ]
train
https://github.com/miku/gluish/blob/56d3ac4f41a944e31ecac0aa3b6d1dc2ce705e29/gluish/task.py#L85-L87
miku/gluish
gluish/task.py
BaseTask.path
def path(self, filename=None, ext='tsv', digest=False, shard=False, encoding='utf-8'): """ Return the path for this class with a certain set of parameters. `ext` sets the extension of the file. If `hash` is true, the filename (w/o extenstion) will be hashed. If `shard` is true, the files are placed in shards, based on the first two chars of the filename (hashed). """ if self.BASE is NotImplemented: raise RuntimeError('BASE directory must be set.') params = dict(self.get_params()) if filename is None: parts = [] for name, param in self.get_params(): if not param.significant: continue if name == 'date' and is_closest_date_parameter(self, 'date'): parts.append('date-%s' % self.closest()) continue if hasattr(param, 'is_list') and param.is_list: es = '-'.join([str(v) for v in getattr(self, name)]) parts.append('%s-%s' % (name, es)) continue val = getattr(self, name) if isinstance(val, datetime.datetime): val = val.strftime('%Y-%m-%dT%H%M%S') elif isinstance(val, datetime.date): val = val.strftime('%Y-%m-%d') parts.append('%s-%s' % (name, val)) name = '-'.join(sorted(parts)) if len(name) == 0: name = 'output' if digest: name = hashlib.sha1(name.encode(encoding)).hexdigest() if not ext: filename = '{fn}'.format(ext=ext, fn=name) else: filename = '{fn}.{ext}'.format(ext=ext, fn=name) if shard: prefix = hashlib.sha1(filename.encode(encoding)).hexdigest()[:2] return os.path.join(self.BASE, self.TAG, self.task_family, prefix, filename) return os.path.join(self.BASE, self.TAG, self.task_family, filename)
python
def path(self, filename=None, ext='tsv', digest=False, shard=False, encoding='utf-8'): """ Return the path for this class with a certain set of parameters. `ext` sets the extension of the file. If `hash` is true, the filename (w/o extenstion) will be hashed. If `shard` is true, the files are placed in shards, based on the first two chars of the filename (hashed). """ if self.BASE is NotImplemented: raise RuntimeError('BASE directory must be set.') params = dict(self.get_params()) if filename is None: parts = [] for name, param in self.get_params(): if not param.significant: continue if name == 'date' and is_closest_date_parameter(self, 'date'): parts.append('date-%s' % self.closest()) continue if hasattr(param, 'is_list') and param.is_list: es = '-'.join([str(v) for v in getattr(self, name)]) parts.append('%s-%s' % (name, es)) continue val = getattr(self, name) if isinstance(val, datetime.datetime): val = val.strftime('%Y-%m-%dT%H%M%S') elif isinstance(val, datetime.date): val = val.strftime('%Y-%m-%d') parts.append('%s-%s' % (name, val)) name = '-'.join(sorted(parts)) if len(name) == 0: name = 'output' if digest: name = hashlib.sha1(name.encode(encoding)).hexdigest() if not ext: filename = '{fn}'.format(ext=ext, fn=name) else: filename = '{fn}.{ext}'.format(ext=ext, fn=name) if shard: prefix = hashlib.sha1(filename.encode(encoding)).hexdigest()[:2] return os.path.join(self.BASE, self.TAG, self.task_family, prefix, filename) return os.path.join(self.BASE, self.TAG, self.task_family, filename)
[ "def", "path", "(", "self", ",", "filename", "=", "None", ",", "ext", "=", "'tsv'", ",", "digest", "=", "False", ",", "shard", "=", "False", ",", "encoding", "=", "'utf-8'", ")", ":", "if", "self", ".", "BASE", "is", "NotImplemented", ":", "raise", ...
Return the path for this class with a certain set of parameters. `ext` sets the extension of the file. If `hash` is true, the filename (w/o extenstion) will be hashed. If `shard` is true, the files are placed in shards, based on the first two chars of the filename (hashed).
[ "Return", "the", "path", "for", "this", "class", "with", "a", "certain", "set", "of", "parameters", ".", "ext", "sets", "the", "extension", "of", "the", "file", ".", "If", "hash", "is", "true", "the", "filename", "(", "w", "/", "o", "extenstion", ")", ...
train
https://github.com/miku/gluish/blob/56d3ac4f41a944e31ecac0aa3b6d1dc2ce705e29/gluish/task.py#L89-L138
miku/gluish
gluish/task.py
MockTask.run
def run(self): """ Just copy the fixture, so we have some output. """ luigi.LocalTarget(path=self.fixture).copy(self.output().path)
python
def run(self): """ Just copy the fixture, so we have some output. """ luigi.LocalTarget(path=self.fixture).copy(self.output().path)
[ "def", "run", "(", "self", ")", ":", "luigi", ".", "LocalTarget", "(", "path", "=", "self", ".", "fixture", ")", ".", "copy", "(", "self", ".", "output", "(", ")", ".", "path", ")" ]
Just copy the fixture, so we have some output.
[ "Just", "copy", "the", "fixture", "so", "we", "have", "some", "output", "." ]
train
https://github.com/miku/gluish/blob/56d3ac4f41a944e31ecac0aa3b6d1dc2ce705e29/gluish/task.py#L149-L151
rande/python-simple-ioc
ioc/event.py
Dispatcher.get_listeners
def get_listeners(self, name): """ Return the callables related to name """ return list(map(lambda listener: listener[0], self.listeners[name]))
python
def get_listeners(self, name): """ Return the callables related to name """ return list(map(lambda listener: listener[0], self.listeners[name]))
[ "def", "get_listeners", "(", "self", ",", "name", ")", ":", "return", "list", "(", "map", "(", "lambda", "listener", ":", "listener", "[", "0", "]", ",", "self", ".", "listeners", "[", "name", "]", ")", ")" ]
Return the callables related to name
[ "Return", "the", "callables", "related", "to", "name" ]
train
https://github.com/rande/python-simple-ioc/blob/36ddf667c1213a07a53cd4cdd708d02494e5190b/ioc/event.py#L68-L72
rande/python-simple-ioc
ioc/event.py
Dispatcher.add_listener
def add_listener(self, name, listener, priority=0): """ Add a new listener to the dispatch """ if name not in self.listeners: self.listeners[name] = [] self.listeners[name].append((listener, priority)) # reorder event self.listeners[name].sort(key=lambda listener: listener[1], reverse=True)
python
def add_listener(self, name, listener, priority=0): """ Add a new listener to the dispatch """ if name not in self.listeners: self.listeners[name] = [] self.listeners[name].append((listener, priority)) # reorder event self.listeners[name].sort(key=lambda listener: listener[1], reverse=True)
[ "def", "add_listener", "(", "self", ",", "name", ",", "listener", ",", "priority", "=", "0", ")", ":", "if", "name", "not", "in", "self", ".", "listeners", ":", "self", ".", "listeners", "[", "name", "]", "=", "[", "]", "self", ".", "listeners", "[...
Add a new listener to the dispatch
[ "Add", "a", "new", "listener", "to", "the", "dispatch" ]
train
https://github.com/rande/python-simple-ioc/blob/36ddf667c1213a07a53cd4cdd708d02494e5190b/ioc/event.py#L74-L84
inveniosoftware/dcxml
dcxml/simpledc.py
dump_etree
def dump_etree(data, container=None, nsmap=None, attribs=None): """Convert dictionary to Simple Dublin Core XML as ElementTree. :param data: Dictionary. :param container: Name (include namespace) of container element. :param nsmap: Namespace mapping for lxml. :param attribs: Default attributes for container element. :returns: LXML ElementTree. """ container = container or container_element nsmap = nsmap or ns attribs = attribs or container_attribs return dump_etree_helper(container, data, rules, nsmap, attribs)
python
def dump_etree(data, container=None, nsmap=None, attribs=None): """Convert dictionary to Simple Dublin Core XML as ElementTree. :param data: Dictionary. :param container: Name (include namespace) of container element. :param nsmap: Namespace mapping for lxml. :param attribs: Default attributes for container element. :returns: LXML ElementTree. """ container = container or container_element nsmap = nsmap or ns attribs = attribs or container_attribs return dump_etree_helper(container, data, rules, nsmap, attribs)
[ "def", "dump_etree", "(", "data", ",", "container", "=", "None", ",", "nsmap", "=", "None", ",", "attribs", "=", "None", ")", ":", "container", "=", "container", "or", "container_element", "nsmap", "=", "nsmap", "or", "ns", "attribs", "=", "attribs", "or...
Convert dictionary to Simple Dublin Core XML as ElementTree. :param data: Dictionary. :param container: Name (include namespace) of container element. :param nsmap: Namespace mapping for lxml. :param attribs: Default attributes for container element. :returns: LXML ElementTree.
[ "Convert", "dictionary", "to", "Simple", "Dublin", "Core", "XML", "as", "ElementTree", "." ]
train
https://github.com/inveniosoftware/dcxml/blob/9fed6123ec0f3f2e2f645ff91653a7e86a39138d/dcxml/simpledc.py#L43-L55
inveniosoftware/dcxml
dcxml/simpledc.py
rule_factory
def rule_factory(plural, singular): """Element rule factory.""" @rules.rule(plural) def f(path, values): for v in values: if v: elem = etree.Element( '{{http://purl.org/dc/elements/1.1/}}{0}'.format(singular)) elem.text = v yield elem f.__name__ = plural return f
python
def rule_factory(plural, singular): """Element rule factory.""" @rules.rule(plural) def f(path, values): for v in values: if v: elem = etree.Element( '{{http://purl.org/dc/elements/1.1/}}{0}'.format(singular)) elem.text = v yield elem f.__name__ = plural return f
[ "def", "rule_factory", "(", "plural", ",", "singular", ")", ":", "@", "rules", ".", "rule", "(", "plural", ")", "def", "f", "(", "path", ",", "values", ")", ":", "for", "v", "in", "values", ":", "if", "v", ":", "elem", "=", "etree", ".", "Element...
Element rule factory.
[ "Element", "rule", "factory", "." ]
train
https://github.com/inveniosoftware/dcxml/blob/9fed6123ec0f3f2e2f645ff91653a7e86a39138d/dcxml/simpledc.py#L70-L81
twisted/mantissa
xmantissa/stats.py
RemoteStatsCollector.startReceivingBoxes
def startReceivingBoxes(self, sender): """ Start observing log events for stat events to send. """ AMP.startReceivingBoxes(self, sender) log.addObserver(self._emit)
python
def startReceivingBoxes(self, sender): """ Start observing log events for stat events to send. """ AMP.startReceivingBoxes(self, sender) log.addObserver(self._emit)
[ "def", "startReceivingBoxes", "(", "self", ",", "sender", ")", ":", "AMP", ".", "startReceivingBoxes", "(", "self", ",", "sender", ")", "log", ".", "addObserver", "(", "self", ".", "_emit", ")" ]
Start observing log events for stat events to send.
[ "Start", "observing", "log", "events", "for", "stat", "events", "to", "send", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/stats.py#L68-L73
twisted/mantissa
xmantissa/stats.py
RemoteStatsCollector.stopReceivingBoxes
def stopReceivingBoxes(self, reason): """ Stop observing log events. """ AMP.stopReceivingBoxes(self, reason) log.removeObserver(self._emit)
python
def stopReceivingBoxes(self, reason): """ Stop observing log events. """ AMP.stopReceivingBoxes(self, reason) log.removeObserver(self._emit)
[ "def", "stopReceivingBoxes", "(", "self", ",", "reason", ")", ":", "AMP", ".", "stopReceivingBoxes", "(", "self", ",", "reason", ")", "log", ".", "removeObserver", "(", "self", ".", "_emit", ")" ]
Stop observing log events.
[ "Stop", "observing", "log", "events", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/stats.py#L76-L81
twisted/mantissa
xmantissa/stats.py
RemoteStatsCollector._emit
def _emit(self, event): """ If the given event is a stat event, send a I{StatUpdate} command. """ if (event.get('interface') is not iaxiom.IStatEvent and 'athena_send_messages' not in event and 'athena_received_messages' not in event): return out = [] for k, v in event.iteritems(): if k in ('system', 'message', 'interface', 'isError'): continue if not isinstance(v, unicode): v = str(v).decode('ascii') out.append(dict(key=k.decode('ascii'), value=v)) self.callRemote(StatUpdate, data=out)
python
def _emit(self, event): """ If the given event is a stat event, send a I{StatUpdate} command. """ if (event.get('interface') is not iaxiom.IStatEvent and 'athena_send_messages' not in event and 'athena_received_messages' not in event): return out = [] for k, v in event.iteritems(): if k in ('system', 'message', 'interface', 'isError'): continue if not isinstance(v, unicode): v = str(v).decode('ascii') out.append(dict(key=k.decode('ascii'), value=v)) self.callRemote(StatUpdate, data=out)
[ "def", "_emit", "(", "self", ",", "event", ")", ":", "if", "(", "event", ".", "get", "(", "'interface'", ")", "is", "not", "iaxiom", ".", "IStatEvent", "and", "'athena_send_messages'", "not", "in", "event", "and", "'athena_received_messages'", "not", "in", ...
If the given event is a stat event, send a I{StatUpdate} command.
[ "If", "the", "given", "event", "is", "a", "stat", "event", "send", "a", "I", "{", "StatUpdate", "}", "command", "." ]
train
https://github.com/twisted/mantissa/blob/53e5502aba23ce99be78b27f923a276593033fe8/xmantissa/stats.py#L84-L100
openvax/datacache
datacache/common.py
normalize_filename
def normalize_filename(filename): """ Remove special characters and shorten if name is too long """ # if the url pointed to a directory then just replace all the special chars filename = re.sub("/|\\|;|:|\?|=", "_", filename) if len(filename) > 150: prefix = hashlib.md5(filename).hexdigest() filename = prefix + filename[-140:] return filename
python
def normalize_filename(filename): """ Remove special characters and shorten if name is too long """ # if the url pointed to a directory then just replace all the special chars filename = re.sub("/|\\|;|:|\?|=", "_", filename) if len(filename) > 150: prefix = hashlib.md5(filename).hexdigest() filename = prefix + filename[-140:] return filename
[ "def", "normalize_filename", "(", "filename", ")", ":", "# if the url pointed to a directory then just replace all the special chars", "filename", "=", "re", ".", "sub", "(", "\"/|\\\\|;|:|\\?|=\"", ",", "\"_\"", ",", "filename", ")", "if", "len", "(", "filename", ")", ...
Remove special characters and shorten if name is too long
[ "Remove", "special", "characters", "and", "shorten", "if", "name", "is", "too", "long" ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/common.py#L47-L58
openvax/datacache
datacache/common.py
build_local_filename
def build_local_filename(download_url=None, filename=None, decompress=False): """ Determine which local filename to use based on the file's source URL, an optional desired filename, and whether a compression suffix needs to be removed """ assert download_url or filename, "Either filename or URL must be specified" # if no filename provided, use the original filename on the server if not filename: digest = hashlib.md5(download_url.encode('utf-8')).hexdigest() parts = split(download_url) filename = digest + "." + "_".join(parts) filename = normalize_filename(filename) if decompress: (base, ext) = splitext(filename) if ext in (".gz", ".zip"): filename = base return filename
python
def build_local_filename(download_url=None, filename=None, decompress=False): """ Determine which local filename to use based on the file's source URL, an optional desired filename, and whether a compression suffix needs to be removed """ assert download_url or filename, "Either filename or URL must be specified" # if no filename provided, use the original filename on the server if not filename: digest = hashlib.md5(download_url.encode('utf-8')).hexdigest() parts = split(download_url) filename = digest + "." + "_".join(parts) filename = normalize_filename(filename) if decompress: (base, ext) = splitext(filename) if ext in (".gz", ".zip"): filename = base return filename
[ "def", "build_local_filename", "(", "download_url", "=", "None", ",", "filename", "=", "None", ",", "decompress", "=", "False", ")", ":", "assert", "download_url", "or", "filename", ",", "\"Either filename or URL must be specified\"", "# if no filename provided, use the o...
Determine which local filename to use based on the file's source URL, an optional desired filename, and whether a compression suffix needs to be removed
[ "Determine", "which", "local", "filename", "to", "use", "based", "on", "the", "file", "s", "source", "URL", "an", "optional", "desired", "filename", "and", "whether", "a", "compression", "suffix", "needs", "to", "be", "removed" ]
train
https://github.com/openvax/datacache/blob/73bcac02d37cf153710a07fbdc636aa55cb214ca/datacache/common.py#L60-L81
mozilla-releng/signtool
signtool/util/file.py
compare
def compare(file1, file2): """compares the contents of two files, passed in either as open file handles or accessible file paths. Does a simple naive string comparison, so do not use on larger files""" if isinstance(file1, six.string_types): # pragma: no branch file1 = open(file1, 'r', True) if isinstance(file2, six.string_types): # pragma: no branch file2 = open(file2, 'r', True) file1_contents = file1.read() file2_contents = file2.read() return file1_contents == file2_contents
python
def compare(file1, file2): """compares the contents of two files, passed in either as open file handles or accessible file paths. Does a simple naive string comparison, so do not use on larger files""" if isinstance(file1, six.string_types): # pragma: no branch file1 = open(file1, 'r', True) if isinstance(file2, six.string_types): # pragma: no branch file2 = open(file2, 'r', True) file1_contents = file1.read() file2_contents = file2.read() return file1_contents == file2_contents
[ "def", "compare", "(", "file1", ",", "file2", ")", ":", "if", "isinstance", "(", "file1", ",", "six", ".", "string_types", ")", ":", "# pragma: no branch", "file1", "=", "open", "(", "file1", ",", "'r'", ",", "True", ")", "if", "isinstance", "(", "file...
compares the contents of two files, passed in either as open file handles or accessible file paths. Does a simple naive string comparison, so do not use on larger files
[ "compares", "the", "contents", "of", "two", "files", "passed", "in", "either", "as", "open", "file", "handles", "or", "accessible", "file", "paths", ".", "Does", "a", "simple", "naive", "string", "comparison", "so", "do", "not", "use", "on", "larger", "fil...
train
https://github.com/mozilla-releng/signtool/blob/0a778778a181cb9cab424b29fa104b70345f53c2/signtool/util/file.py#L11-L21
mozilla-releng/signtool
signtool/util/file.py
sha1sum
def sha1sum(f): """Return the SHA-1 hash of the contents of file `f`, in hex format""" h = hashlib.sha1() fp = open(f, 'rb') while True: block = fp.read(512 * 1024) if not block: break h.update(block) return h.hexdigest()
python
def sha1sum(f): """Return the SHA-1 hash of the contents of file `f`, in hex format""" h = hashlib.sha1() fp = open(f, 'rb') while True: block = fp.read(512 * 1024) if not block: break h.update(block) return h.hexdigest()
[ "def", "sha1sum", "(", "f", ")", ":", "h", "=", "hashlib", ".", "sha1", "(", ")", "fp", "=", "open", "(", "f", ",", "'rb'", ")", "while", "True", ":", "block", "=", "fp", ".", "read", "(", "512", "*", "1024", ")", "if", "not", "block", ":", ...
Return the SHA-1 hash of the contents of file `f`, in hex format
[ "Return", "the", "SHA", "-", "1", "hash", "of", "the", "contents", "of", "file", "f", "in", "hex", "format" ]
train
https://github.com/mozilla-releng/signtool/blob/0a778778a181cb9cab424b29fa104b70345f53c2/signtool/util/file.py#L24-L33
mozilla-releng/signtool
signtool/util/file.py
safe_copyfile
def safe_copyfile(src, dest): """safely copy src to dest using a temporary intermediate and then renaming to dest""" fd, tmpname = tempfile.mkstemp(dir=os.path.dirname(dest)) shutil.copyfileobj(open(src, 'rb'), os.fdopen(fd, 'wb')) shutil.copystat(src, tmpname) os.rename(tmpname, dest)
python
def safe_copyfile(src, dest): """safely copy src to dest using a temporary intermediate and then renaming to dest""" fd, tmpname = tempfile.mkstemp(dir=os.path.dirname(dest)) shutil.copyfileobj(open(src, 'rb'), os.fdopen(fd, 'wb')) shutil.copystat(src, tmpname) os.rename(tmpname, dest)
[ "def", "safe_copyfile", "(", "src", ",", "dest", ")", ":", "fd", ",", "tmpname", "=", "tempfile", ".", "mkstemp", "(", "dir", "=", "os", ".", "path", ".", "dirname", "(", "dest", ")", ")", "shutil", ".", "copyfileobj", "(", "open", "(", "src", ",",...
safely copy src to dest using a temporary intermediate and then renaming to dest
[ "safely", "copy", "src", "to", "dest", "using", "a", "temporary", "intermediate", "and", "then", "renaming", "to", "dest" ]
train
https://github.com/mozilla-releng/signtool/blob/0a778778a181cb9cab424b29fa104b70345f53c2/signtool/util/file.py#L36-L42
KeplerGO/K2fov
K2fov/greatcircle.py
sphericalAngSep
def sphericalAngSep(ra0, dec0, ra1, dec1, radians=False): """ Compute the spherical angular separation between two points on the sky. //Taken from http://www.movable-type.co.uk/scripts/gis-faq-5.1.html NB: For small distances you can probably use sqrt( dDec**2 + cos^2(dec)*dRa) where dDec = dec1 - dec0 and dRa = ra1 - ra0 and dec1 \approx dec \approx dec0 """ if radians==False: ra0 = np.radians(ra0) dec0 = np.radians(dec0) ra1 = np.radians(ra1) dec1 = np.radians(dec1) deltaRa= ra1-ra0 deltaDec= dec1-dec0 val = haversine(deltaDec) val += np.cos(dec0) * np.cos(dec1) * haversine(deltaRa) val = min(1, np.sqrt(val)) ; #Guard against round off error? val = 2*np.arcsin(val) #Convert back to degrees if necessary if radians==False: val = np.degrees(val) return val
python
def sphericalAngSep(ra0, dec0, ra1, dec1, radians=False): """ Compute the spherical angular separation between two points on the sky. //Taken from http://www.movable-type.co.uk/scripts/gis-faq-5.1.html NB: For small distances you can probably use sqrt( dDec**2 + cos^2(dec)*dRa) where dDec = dec1 - dec0 and dRa = ra1 - ra0 and dec1 \approx dec \approx dec0 """ if radians==False: ra0 = np.radians(ra0) dec0 = np.radians(dec0) ra1 = np.radians(ra1) dec1 = np.radians(dec1) deltaRa= ra1-ra0 deltaDec= dec1-dec0 val = haversine(deltaDec) val += np.cos(dec0) * np.cos(dec1) * haversine(deltaRa) val = min(1, np.sqrt(val)) ; #Guard against round off error? val = 2*np.arcsin(val) #Convert back to degrees if necessary if radians==False: val = np.degrees(val) return val
[ "def", "sphericalAngSep", "(", "ra0", ",", "dec0", ",", "ra1", ",", "dec1", ",", "radians", "=", "False", ")", ":", "if", "radians", "==", "False", ":", "ra0", "=", "np", ".", "radians", "(", "ra0", ")", "dec0", "=", "np", ".", "radians", "(", "d...
Compute the spherical angular separation between two points on the sky. //Taken from http://www.movable-type.co.uk/scripts/gis-faq-5.1.html NB: For small distances you can probably use sqrt( dDec**2 + cos^2(dec)*dRa) where dDec = dec1 - dec0 and dRa = ra1 - ra0 and dec1 \approx dec \approx dec0
[ "Compute", "the", "spherical", "angular", "separation", "between", "two", "points", "on", "the", "sky", "." ]
train
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/greatcircle.py#L10-L42
KeplerGO/K2fov
K2fov/greatcircle.py
sphericalAngSepFast
def sphericalAngSepFast(ra0, dec0, ra1, dec1, radians=False): """A faster (but less accurate) implementation of sphericalAngleSep Taken from http://www.movable-type.co.uk/scripts/latlong.html For additional speed, set wantSquare=True, and the return value is the square of the separation """ if radians==False: ra0 = np.radians(ra0) dec0 = np.radians(dec0) ra1 = np.radians(ra1) dec1 = np.radians(dec1) deltaRa= ra1-ra0 deltaDec= dec1-dec0 avgDec = .5*(dec0+dec1) x = deltaRa*np.cos(avgDec) val = np.hypot(x, deltaDec) if radians == False: val = np.degrees(val) return val
python
def sphericalAngSepFast(ra0, dec0, ra1, dec1, radians=False): """A faster (but less accurate) implementation of sphericalAngleSep Taken from http://www.movable-type.co.uk/scripts/latlong.html For additional speed, set wantSquare=True, and the return value is the square of the separation """ if radians==False: ra0 = np.radians(ra0) dec0 = np.radians(dec0) ra1 = np.radians(ra1) dec1 = np.radians(dec1) deltaRa= ra1-ra0 deltaDec= dec1-dec0 avgDec = .5*(dec0+dec1) x = deltaRa*np.cos(avgDec) val = np.hypot(x, deltaDec) if radians == False: val = np.degrees(val) return val
[ "def", "sphericalAngSepFast", "(", "ra0", ",", "dec0", ",", "ra1", ",", "dec1", ",", "radians", "=", "False", ")", ":", "if", "radians", "==", "False", ":", "ra0", "=", "np", ".", "radians", "(", "ra0", ")", "dec0", "=", "np", ".", "radians", "(", ...
A faster (but less accurate) implementation of sphericalAngleSep Taken from http://www.movable-type.co.uk/scripts/latlong.html For additional speed, set wantSquare=True, and the return value is the square of the separation
[ "A", "faster", "(", "but", "less", "accurate", ")", "implementation", "of", "sphericalAngleSep" ]
train
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/greatcircle.py#L45-L70