body
stringlengths 26
98.2k
| body_hash
int64 -9,222,864,604,528,158,000
9,221,803,474B
| docstring
stringlengths 1
16.8k
| path
stringlengths 5
230
| name
stringlengths 1
96
| repository_name
stringlengths 7
89
| lang
stringclasses 1
value | body_without_docstring
stringlengths 20
98.2k
|
|---|---|---|---|---|---|---|---|
def voice(self, roomJID):
'\n Request voice for a moderated room.\n\n @param roomJID: The room jabber/xmpp entity id.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
message = VoiceRequest(recipient=roomJID)
self.xmlstream.send(message.toElement())
| 1,138,415,530,318,392,200
|
Request voice for a moderated room.
@param roomJID: The room jabber/xmpp entity id.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
voice
|
Gandi/wokkel
|
python
|
def voice(self, roomJID):
'\n Request voice for a moderated room.\n\n @param roomJID: The room jabber/xmpp entity id.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
message = VoiceRequest(recipient=roomJID)
self.xmlstream.send(message.toElement())
|
def history(self, roomJID, messages):
"\n Send history to create a MUC based on a one on one chat.\n\n See: http://xmpp.org/extensions/xep-0045.html#continue\n\n @param roomJID: The room jabber/xmpp entity id.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param messages: The history to send to the room as an ordered list of\n message, represented by a dictionary with the keys\n C{'stanza'}, holding the original stanza a\n L{domish.Element}, and C{'timestamp'} with the\n timestamp.\n @type messages: C{list} of L{domish.Element}\n "
for message in messages:
stanza = message['stanza']
stanza['type'] = 'groupchat'
delay = Delay(stamp=message['timestamp'])
sender = stanza.getAttribute('from')
if (sender is not None):
delay.sender = jid.JID(sender)
stanza.addChild(delay.toElement())
stanza['to'] = roomJID.userhost()
if stanza.hasAttribute('from'):
del stanza['from']
self.xmlstream.send(stanza)
| 7,635,375,756,510,448,000
|
Send history to create a MUC based on a one on one chat.
See: http://xmpp.org/extensions/xep-0045.html#continue
@param roomJID: The room jabber/xmpp entity id.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param messages: The history to send to the room as an ordered list of
message, represented by a dictionary with the keys
C{'stanza'}, holding the original stanza a
L{domish.Element}, and C{'timestamp'} with the
timestamp.
@type messages: C{list} of L{domish.Element}
|
wokkel/muc.py
|
history
|
Gandi/wokkel
|
python
|
def history(self, roomJID, messages):
"\n Send history to create a MUC based on a one on one chat.\n\n See: http://xmpp.org/extensions/xep-0045.html#continue\n\n @param roomJID: The room jabber/xmpp entity id.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param messages: The history to send to the room as an ordered list of\n message, represented by a dictionary with the keys\n C{'stanza'}, holding the original stanza a\n L{domish.Element}, and C{'timestamp'} with the\n timestamp.\n @type messages: C{list} of L{domish.Element}\n "
for message in messages:
stanza = message['stanza']
stanza['type'] = 'groupchat'
delay = Delay(stamp=message['timestamp'])
sender = stanza.getAttribute('from')
if (sender is not None):
delay.sender = jid.JID(sender)
stanza.addChild(delay.toElement())
stanza['to'] = roomJID.userhost()
if stanza.hasAttribute('from'):
del stanza['from']
self.xmlstream.send(stanza)
|
def getConfiguration(self, roomJID):
"\n Grab the configuration from the room.\n\n This sends an iq request to the room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @return: A deferred that fires with the room's configuration form as\n a L{data_form.Form} or C{None} if there are no configuration\n options available.\n "
def cb(response):
form = data_form.findForm(response.query, NS_MUC_CONFIG)
return form
request = ConfigureRequest(recipient=roomJID, options=None)
d = self.request(request)
d.addCallback(cb)
return d
| -5,454,010,734,910,756,000
|
Grab the configuration from the room.
This sends an iq request to the room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@return: A deferred that fires with the room's configuration form as
a L{data_form.Form} or C{None} if there are no configuration
options available.
|
wokkel/muc.py
|
getConfiguration
|
Gandi/wokkel
|
python
|
def getConfiguration(self, roomJID):
"\n Grab the configuration from the room.\n\n This sends an iq request to the room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @return: A deferred that fires with the room's configuration form as\n a L{data_form.Form} or C{None} if there are no configuration\n options available.\n "
def cb(response):
form = data_form.findForm(response.query, NS_MUC_CONFIG)
return form
request = ConfigureRequest(recipient=roomJID, options=None)
d = self.request(request)
d.addCallback(cb)
return d
|
def configure(self, roomJID, options):
'\n Configure a room.\n\n @param roomJID: The room to configure.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param options: A mapping of field names to values, or C{None} to\n cancel.\n @type options: C{dict}\n '
if (options is None):
options = False
request = ConfigureRequest(recipient=roomJID, options=options)
return self.request(request)
| 2,256,700,753,176,623,900
|
Configure a room.
@param roomJID: The room to configure.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param options: A mapping of field names to values, or C{None} to
cancel.
@type options: C{dict}
|
wokkel/muc.py
|
configure
|
Gandi/wokkel
|
python
|
def configure(self, roomJID, options):
'\n Configure a room.\n\n @param roomJID: The room to configure.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param options: A mapping of field names to values, or C{None} to\n cancel.\n @type options: C{dict}\n '
if (options is None):
options = False
request = ConfigureRequest(recipient=roomJID, options=options)
return self.request(request)
|
def _getAffiliationList(self, roomJID, affiliation):
'\n Send a request for an affiliation list in a room.\n '
def cb(response):
stanza = AdminStanza.fromElement(response)
return stanza.items
request = AdminStanza(recipient=roomJID, stanzaType='get')
request.items = [AdminItem(affiliation=affiliation)]
d = self.request(request)
d.addCallback(cb)
return d
| -2,799,156,953,370,444,300
|
Send a request for an affiliation list in a room.
|
wokkel/muc.py
|
_getAffiliationList
|
Gandi/wokkel
|
python
|
def _getAffiliationList(self, roomJID, affiliation):
'\n \n '
def cb(response):
stanza = AdminStanza.fromElement(response)
return stanza.items
request = AdminStanza(recipient=roomJID, stanzaType='get')
request.items = [AdminItem(affiliation=affiliation)]
d = self.request(request)
d.addCallback(cb)
return d
|
def _getRoleList(self, roomJID, role):
'\n Send a request for a role list in a room.\n '
def cb(response):
stanza = AdminStanza.fromElement(response)
return stanza.items
request = AdminStanza(recipient=roomJID, stanzaType='get')
request.items = [AdminItem(role=role)]
d = self.request(request)
d.addCallback(cb)
return d
| -7,067,061,049,045,694,000
|
Send a request for a role list in a room.
|
wokkel/muc.py
|
_getRoleList
|
Gandi/wokkel
|
python
|
def _getRoleList(self, roomJID, role):
'\n \n '
def cb(response):
stanza = AdminStanza.fromElement(response)
return stanza.items
request = AdminStanza(recipient=roomJID, stanzaType='get')
request.items = [AdminItem(role=role)]
d = self.request(request)
d.addCallback(cb)
return d
|
def getMemberList(self, roomJID):
'\n Get the member list of a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._getAffiliationList(roomJID, 'member')
| 8,178,079,271,744,483,000
|
Get the member list of a room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
getMemberList
|
Gandi/wokkel
|
python
|
def getMemberList(self, roomJID):
'\n Get the member list of a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._getAffiliationList(roomJID, 'member')
|
def getAdminList(self, roomJID):
'\n Get the admin list of a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._getAffiliationList(roomJID, 'admin')
| 7,913,491,116,521,769,000
|
Get the admin list of a room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
getAdminList
|
Gandi/wokkel
|
python
|
def getAdminList(self, roomJID):
'\n Get the admin list of a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._getAffiliationList(roomJID, 'admin')
|
def getBanList(self, roomJID):
'\n Get an outcast list from a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._getAffiliationList(roomJID, 'outcast')
| 3,179,525,397,458,575,400
|
Get an outcast list from a room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
getBanList
|
Gandi/wokkel
|
python
|
def getBanList(self, roomJID):
'\n Get an outcast list from a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._getAffiliationList(roomJID, 'outcast')
|
def getOwnerList(self, roomJID):
'\n Get an owner list from a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._getAffiliationList(roomJID, 'owner')
| -1,628,875,077,073,642,200
|
Get an owner list from a room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
getOwnerList
|
Gandi/wokkel
|
python
|
def getOwnerList(self, roomJID):
'\n Get an owner list from a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._getAffiliationList(roomJID, 'owner')
|
def getModeratorList(self, roomJID):
'\n Get the moderator list of a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
d = self._getRoleList(roomJID, 'moderator')
return d
| 8,124,120,447,538,257,000
|
Get the moderator list of a room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
getModeratorList
|
Gandi/wokkel
|
python
|
def getModeratorList(self, roomJID):
'\n Get the moderator list of a room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
d = self._getRoleList(roomJID, 'moderator')
return d
|
def _setAffiliation(self, roomJID, entity, affiliation, reason=None, sender=None):
"\n Send a request to change an entity's affiliation to a MUC room.\n "
request = AdminStanza(recipient=roomJID, sender=sender, stanzaType='set')
item = AdminItem(entity=entity, affiliation=affiliation, reason=reason)
request.items = [item]
return self.request(request)
| 9,061,982,443,933,875,000
|
Send a request to change an entity's affiliation to a MUC room.
|
wokkel/muc.py
|
_setAffiliation
|
Gandi/wokkel
|
python
|
def _setAffiliation(self, roomJID, entity, affiliation, reason=None, sender=None):
"\n \n "
request = AdminStanza(recipient=roomJID, sender=sender, stanzaType='set')
item = AdminItem(entity=entity, affiliation=affiliation, reason=reason)
request.items = [item]
return self.request(request)
|
def _setRole(self, roomJID, nick, role, reason=None, sender=None):
"\n Send a request to change an occupant's role in a MUC room.\n "
request = AdminStanza(recipient=roomJID, sender=sender, stanzaType='set')
item = AdminItem(nick=nick, role=role, reason=reason)
request.items = [item]
return self.request(request)
| 5,439,391,183,908,645,000
|
Send a request to change an occupant's role in a MUC room.
|
wokkel/muc.py
|
_setRole
|
Gandi/wokkel
|
python
|
def _setRole(self, roomJID, nick, role, reason=None, sender=None):
"\n \n "
request = AdminStanza(recipient=roomJID, sender=sender, stanzaType='set')
item = AdminItem(nick=nick, role=role, reason=reason)
request.items = [item]
return self.request(request)
|
def modifyAffiliationList(self, roomJID, entities, affiliation, sender=None):
'\n Modify an affiliation list.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param entities: The list of entities to change for a room.\n @type entities: C{list} of\n L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param affiliation: The affilation to the entities will acquire.\n @type affiliation: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n '
request = AdminStanza(recipient=roomJID, sender=sender, stanzaType='set')
request.items = [AdminItem(entity=entity, affiliation=affiliation) for entity in entities]
return self.request(request)
| -5,120,453,398,144,076,000
|
Modify an affiliation list.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param entities: The list of entities to change for a room.
@type entities: C{list} of
L{JID<twisted.words.protocols.jabber.jid.JID>}
@param affiliation: The affilation to the entities will acquire.
@type affiliation: C{unicode}
@param sender: The entity sending the request.
@type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
modifyAffiliationList
|
Gandi/wokkel
|
python
|
def modifyAffiliationList(self, roomJID, entities, affiliation, sender=None):
'\n Modify an affiliation list.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param entities: The list of entities to change for a room.\n @type entities: C{list} of\n L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param affiliation: The affilation to the entities will acquire.\n @type affiliation: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n '
request = AdminStanza(recipient=roomJID, sender=sender, stanzaType='set')
request.items = [AdminItem(entity=entity, affiliation=affiliation) for entity in entities]
return self.request(request)
|
def grantVoice(self, roomJID, nick, reason=None, sender=None):
'\n Grant voice to an entity.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The nick name for the user in this room.\n @type nick: C{unicode}\n\n @param reason: The reason for granting voice to the entity.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setRole(roomJID, nick=nick, role='participant', reason=reason, sender=sender)
| -6,749,614,054,787,218,000
|
Grant voice to an entity.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param nick: The nick name for the user in this room.
@type nick: C{unicode}
@param reason: The reason for granting voice to the entity.
@type reason: C{unicode}
@param sender: The entity sending the request.
@type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
grantVoice
|
Gandi/wokkel
|
python
|
def grantVoice(self, roomJID, nick, reason=None, sender=None):
'\n Grant voice to an entity.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The nick name for the user in this room.\n @type nick: C{unicode}\n\n @param reason: The reason for granting voice to the entity.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setRole(roomJID, nick=nick, role='participant', reason=reason, sender=sender)
|
def revokeVoice(self, roomJID, nick, reason=None, sender=None):
'\n Revoke voice from a participant.\n\n This will disallow the entity to send messages to a moderated room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The nick name for the user in this room.\n @type nick: C{unicode}\n\n @param reason: The reason for revoking voice from the entity.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setRole(roomJID, nick=nick, role='visitor', reason=reason, sender=sender)
| -3,556,443,029,513,131,000
|
Revoke voice from a participant.
This will disallow the entity to send messages to a moderated room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param nick: The nick name for the user in this room.
@type nick: C{unicode}
@param reason: The reason for revoking voice from the entity.
@type reason: C{unicode}
@param sender: The entity sending the request.
@type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
revokeVoice
|
Gandi/wokkel
|
python
|
def revokeVoice(self, roomJID, nick, reason=None, sender=None):
'\n Revoke voice from a participant.\n\n This will disallow the entity to send messages to a moderated room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The nick name for the user in this room.\n @type nick: C{unicode}\n\n @param reason: The reason for revoking voice from the entity.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setRole(roomJID, nick=nick, role='visitor', reason=reason, sender=sender)
|
def grantModerator(self, roomJID, nick, reason=None, sender=None):
'\n Grant moderator privileges to a MUC room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The nick name for the user in this room.\n @type nick: C{unicode}\n\n @param reason: The reason for granting moderation to the entity.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setRole(roomJID, nick=nick, role='moderator', reason=reason, sender=sender)
| 887,203,103,960,406,700
|
Grant moderator privileges to a MUC room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param nick: The nick name for the user in this room.
@type nick: C{unicode}
@param reason: The reason for granting moderation to the entity.
@type reason: C{unicode}
@param sender: The entity sending the request.
@type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
grantModerator
|
Gandi/wokkel
|
python
|
def grantModerator(self, roomJID, nick, reason=None, sender=None):
'\n Grant moderator privileges to a MUC room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The nick name for the user in this room.\n @type nick: C{unicode}\n\n @param reason: The reason for granting moderation to the entity.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setRole(roomJID, nick=nick, role='moderator', reason=reason, sender=sender)
|
def ban(self, roomJID, entity, reason=None, sender=None):
'\n Ban a user from a MUC room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param entity: The bare JID of the entity to be banned.\n @type entity: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param reason: The reason for banning the entity.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setAffiliation(roomJID, entity, 'outcast', reason=reason, sender=sender)
| -4,115,670,468,276,500,000
|
Ban a user from a MUC room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param entity: The bare JID of the entity to be banned.
@type entity: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param reason: The reason for banning the entity.
@type reason: C{unicode}
@param sender: The entity sending the request.
@type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
ban
|
Gandi/wokkel
|
python
|
def ban(self, roomJID, entity, reason=None, sender=None):
'\n Ban a user from a MUC room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param entity: The bare JID of the entity to be banned.\n @type entity: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param reason: The reason for banning the entity.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setAffiliation(roomJID, entity, 'outcast', reason=reason, sender=sender)
|
def kick(self, roomJID, nick, reason=None, sender=None):
'\n Kick a user from a MUC room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The occupant to be banned.\n @type nick: C{unicode}\n\n @param reason: The reason given for the kick.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setRole(roomJID, nick, 'none', reason=reason, sender=sender)
| 3,500,691,337,500,821,000
|
Kick a user from a MUC room.
@param roomJID: The bare JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param nick: The occupant to be banned.
@type nick: C{unicode}
@param reason: The reason given for the kick.
@type reason: C{unicode}
@param sender: The entity sending the request.
@type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
kick
|
Gandi/wokkel
|
python
|
def kick(self, roomJID, nick, reason=None, sender=None):
'\n Kick a user from a MUC room.\n\n @param roomJID: The bare JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The occupant to be banned.\n @type nick: C{unicode}\n\n @param reason: The reason given for the kick.\n @type reason: C{unicode}\n\n @param sender: The entity sending the request.\n @type sender: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._setRole(roomJID, nick, 'none', reason=reason, sender=sender)
|
def destroy(self, roomJID, reason=None, alternate=None, password=None):
'\n Destroy a room.\n\n @param roomJID: The JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param reason: The reason for the destruction of the room.\n @type reason: C{unicode}\n\n @param alternate: The JID of the room suggested as an alternate venue.\n @type alternate: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n '
request = DestructionRequest(recipient=roomJID, reason=reason, alternate=alternate, password=password)
return self.request(request)
| 7,246,284,918,132,050,000
|
Destroy a room.
@param roomJID: The JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param reason: The reason for the destruction of the room.
@type reason: C{unicode}
@param alternate: The JID of the room suggested as an alternate venue.
@type alternate: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
destroy
|
Gandi/wokkel
|
python
|
def destroy(self, roomJID, reason=None, alternate=None, password=None):
'\n Destroy a room.\n\n @param roomJID: The JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param reason: The reason for the destruction of the room.\n @type reason: C{unicode}\n\n @param alternate: The JID of the room suggested as an alternate venue.\n @type alternate: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n '
request = DestructionRequest(recipient=roomJID, reason=reason, alternate=alternate, password=password)
return self.request(request)
|
def __init__(self, roomJID, nick):
'\n Initialize the room.\n '
self.roomJID = roomJID
self.setNick(nick)
self.roster = {}
| -6,570,074,872,449,545,000
|
Initialize the room.
|
wokkel/muc.py
|
__init__
|
Gandi/wokkel
|
python
|
def __init__(self, roomJID, nick):
'\n \n '
self.roomJID = roomJID
self.setNick(nick)
self.roster = {}
|
def addUser(self, user):
'\n Add a user to the room roster.\n\n @param user: The user object that is being added to the room.\n @type user: L{User}\n '
self.roster[user.nick] = user
| -8,182,693,432,624,344,000
|
Add a user to the room roster.
@param user: The user object that is being added to the room.
@type user: L{User}
|
wokkel/muc.py
|
addUser
|
Gandi/wokkel
|
python
|
def addUser(self, user):
'\n Add a user to the room roster.\n\n @param user: The user object that is being added to the room.\n @type user: L{User}\n '
self.roster[user.nick] = user
|
def inRoster(self, user):
'\n Check if a user is in the MUC room.\n\n @param user: The user object to check.\n @type user: L{User}\n '
return (user.nick in self.roster)
| 1,240,849,481,805,474,000
|
Check if a user is in the MUC room.
@param user: The user object to check.
@type user: L{User}
|
wokkel/muc.py
|
inRoster
|
Gandi/wokkel
|
python
|
def inRoster(self, user):
'\n Check if a user is in the MUC room.\n\n @param user: The user object to check.\n @type user: L{User}\n '
return (user.nick in self.roster)
|
def getUser(self, nick):
"\n Get a user from the room's roster.\n\n @param nick: The nick for the user in the MUC room.\n @type nick: C{unicode}\n "
return self.roster.get(nick)
| 7,890,928,055,650,853,000
|
Get a user from the room's roster.
@param nick: The nick for the user in the MUC room.
@type nick: C{unicode}
|
wokkel/muc.py
|
getUser
|
Gandi/wokkel
|
python
|
def getUser(self, nick):
"\n Get a user from the room's roster.\n\n @param nick: The nick for the user in the MUC room.\n @type nick: C{unicode}\n "
return self.roster.get(nick)
|
def removeUser(self, user):
"\n Remove a user from the MUC room's roster.\n\n @param user: The user object to check.\n @type user: L{User}\n "
if self.inRoster(user):
del self.roster[user.nick]
| 5,453,805,317,436,812,000
|
Remove a user from the MUC room's roster.
@param user: The user object to check.
@type user: L{User}
|
wokkel/muc.py
|
removeUser
|
Gandi/wokkel
|
python
|
def removeUser(self, user):
"\n Remove a user from the MUC room's roster.\n\n @param user: The user object to check.\n @type user: L{User}\n "
if self.inRoster(user):
del self.roster[user.nick]
|
def _addRoom(self, room):
'\n Add a room to the room collection.\n\n Rooms are stored by the JID of the room itself. I.e. it uses the Room\n ID and service parts of the Room JID.\n\n @note: An entity can only join a particular room once.\n '
roomJID = room.occupantJID.userhostJID()
self._rooms[roomJID] = room
| 212,867,097,126,227,550
|
Add a room to the room collection.
Rooms are stored by the JID of the room itself. I.e. it uses the Room
ID and service parts of the Room JID.
@note: An entity can only join a particular room once.
|
wokkel/muc.py
|
_addRoom
|
Gandi/wokkel
|
python
|
def _addRoom(self, room):
'\n Add a room to the room collection.\n\n Rooms are stored by the JID of the room itself. I.e. it uses the Room\n ID and service parts of the Room JID.\n\n @note: An entity can only join a particular room once.\n '
roomJID = room.occupantJID.userhostJID()
self._rooms[roomJID] = room
|
def _getRoom(self, roomJID):
'\n Grab a room from the room collection.\n\n This uses the Room ID and service parts of the given JID to look up\n the L{Room} instance associated with it.\n\n @type occupantJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._rooms.get(roomJID)
| -2,379,347,027,078,015,000
|
Grab a room from the room collection.
This uses the Room ID and service parts of the given JID to look up
the L{Room} instance associated with it.
@type occupantJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
_getRoom
|
Gandi/wokkel
|
python
|
def _getRoom(self, roomJID):
'\n Grab a room from the room collection.\n\n This uses the Room ID and service parts of the given JID to look up\n the L{Room} instance associated with it.\n\n @type occupantJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
return self._rooms.get(roomJID)
|
def _removeRoom(self, roomJID):
'\n Delete a room from the room collection.\n '
if (roomJID in self._rooms):
del self._rooms[roomJID]
| 7,946,502,388,913,218,000
|
Delete a room from the room collection.
|
wokkel/muc.py
|
_removeRoom
|
Gandi/wokkel
|
python
|
def _removeRoom(self, roomJID):
'\n \n '
if (roomJID in self._rooms):
del self._rooms[roomJID]
|
def _getRoomUser(self, stanza):
"\n Lookup the room and user associated with the stanza's sender.\n "
occupantJID = stanza.sender
if (not occupantJID):
return (None, None)
room = self._getRoom(occupantJID.userhostJID())
if (room is None):
return (None, None)
nick = occupantJID.resource
user = room.getUser(nick)
return (room, user)
| 3,306,524,792,348,439,000
|
Lookup the room and user associated with the stanza's sender.
|
wokkel/muc.py
|
_getRoomUser
|
Gandi/wokkel
|
python
|
def _getRoomUser(self, stanza):
"\n \n "
occupantJID = stanza.sender
if (not occupantJID):
return (None, None)
room = self._getRoom(occupantJID.userhostJID())
if (room is None):
return (None, None)
nick = occupantJID.resource
user = room.getUser(nick)
return (room, user)
|
def unavailableReceived(self, presence):
'\n Unavailable presence was received.\n\n If this was received from a MUC room occupant JID, that occupant has\n left the room.\n '
(room, user) = self._getRoomUser(presence)
if ((room is None) or (user is None)):
return
room.removeUser(user)
self.userLeftRoom(room, user)
| 3,576,241,825,482,311,000
|
Unavailable presence was received.
If this was received from a MUC room occupant JID, that occupant has
left the room.
|
wokkel/muc.py
|
unavailableReceived
|
Gandi/wokkel
|
python
|
def unavailableReceived(self, presence):
'\n Unavailable presence was received.\n\n If this was received from a MUC room occupant JID, that occupant has\n left the room.\n '
(room, user) = self._getRoomUser(presence)
if ((room is None) or (user is None)):
return
room.removeUser(user)
self.userLeftRoom(room, user)
|
def availableReceived(self, presence):
'\n Available presence was received.\n '
(room, user) = self._getRoomUser(presence)
if (room is None):
return
if (user is None):
nick = presence.sender.resource
user = User(nick, presence.entity)
user.status = presence.status
user.show = presence.show
if room.inRoster(user):
self.userUpdatedStatus(room, user, presence.show, presence.status)
else:
room.addUser(user)
self.userJoinedRoom(room, user)
| 5,144,098,115,746,772,000
|
Available presence was received.
|
wokkel/muc.py
|
availableReceived
|
Gandi/wokkel
|
python
|
def availableReceived(self, presence):
'\n \n '
(room, user) = self._getRoomUser(presence)
if (room is None):
return
if (user is None):
nick = presence.sender.resource
user = User(nick, presence.entity)
user.status = presence.status
user.show = presence.show
if room.inRoster(user):
self.userUpdatedStatus(room, user, presence.show, presence.status)
else:
room.addUser(user)
self.userJoinedRoom(room, user)
|
def groupChatReceived(self, message):
    """
    A group chat message has been received from a MUC room.

    Dispatches to L{receivedSubject} for subject changes,
    L{receivedHistory} for delayed (history) messages, and
    L{receivedGroupChat} for live messages.
    """
    room, user = self._getRoomUser(message)
    if room is None:
        return
    if message.subject:
        self.receivedSubject(room, user, message.subject)
    elif message.delay is not None:
        self.receivedHistory(room, user, message)
    else:
        self.receivedGroupChat(room, user, message)
| 5,187,066,251,613,629,000
|
A group chat message has been received from a MUC room.
There are a few event methods that may get called here.
L{receivedGroupChat}, L{receivedSubject} or L{receivedHistory}.
|
wokkel/muc.py
|
groupChatReceived
|
Gandi/wokkel
|
python
|
def groupChatReceived(self, message):
'\n A group chat message has been received from a MUC room.\n\n There are a few event methods that may get called here.\n L{receivedGroupChat}, L{receivedSubject} or L{receivedHistory}.\n '
(room, user) = self._getRoomUser(message)
if (room is None):
return
if message.subject:
self.receivedSubject(room, user, message.subject)
elif (message.delay is None):
self.receivedGroupChat(room, user, message)
else:
self.receivedHistory(room, user, message)
|
def userJoinedRoom(self, room, user):
    """
    User has joined a MUC room.

    This method will need to be overridden in order for clients to
    do something when this event occurs.

    @param room: The room the user has joined.
    @type room: L{Room}

    @param user: The user that joined the MUC room.
    @type user: L{User}
    """
    pass
| 8,850,210,654,812,462,000
|
User has joined a MUC room.
This method will need to be modified in order for clients to
do something when this event occurs.
@param room: The room the user has joined.
@type room: L{Room}
@param user: The user that joined the MUC room.
@type user: L{User}
|
wokkel/muc.py
|
userJoinedRoom
|
Gandi/wokkel
|
python
|
def userJoinedRoom(self, room, user):
'\n User has joined a MUC room.\n\n This method will need to be modified inorder for clients to\n do something when this event occurs.\n\n @param room: The room the user has joined.\n @type room: L{Room}\n\n @param user: The user that joined the MUC room.\n @type user: L{User}\n '
pass
|
def userLeftRoom(self, room, user):
    """
    User has left a room.

    This method will need to be overridden in order for clients to
    do something when this event occurs.

    @param room: The room the user has left.
    @type room: L{Room}

    @param user: The user that left the MUC room.
    @type user: L{User}
    """
    pass
| -3,553,983,360,623,956,000
|
User has left a room.
This method will need to be modified in order for clients to
do something when this event occurs.
@param room: The room the user has left.
@type room: L{Room}
@param user: The user that left the MUC room.
@type user: L{User}
|
wokkel/muc.py
|
userLeftRoom
|
Gandi/wokkel
|
python
|
def userLeftRoom(self, room, user):
'\n User has left a room.\n\n This method will need to be modified inorder for clients to\n do something when this event occurs.\n\n @param room: The room the user has joined.\n @type room: L{Room}\n\n @param user: The user that left the MUC room.\n @type user: L{User}\n '
pass
|
def userUpdatedStatus(self, room, user, show, status):
    """
    User Presence has been received.

    This method will need to be overridden in order for clients to
    do something when this event occurs.

    @param room: The room the presence was received from.
    @type room: L{Room}

    @param user: The occupant whose presence changed.
    @type user: L{User}

    @param show: Availability value carried by the presence, or C{None}.
    @param status: Status text carried by the presence, or C{None}.
    """
    pass
| 7,898,166,675,434,161,000
|
User Presence has been received.
This method will need to be modified in order for clients to
do something when this event occurs.
|
wokkel/muc.py
|
userUpdatedStatus
|
Gandi/wokkel
|
python
|
def userUpdatedStatus(self, room, user, show, status):
'\n User Presence has been received.\n\n This method will need to be modified inorder for clients to\n do something when this event occurs.\n '
pass
|
def receivedSubject(self, room, user, subject):
    """
    A (new) room subject has been received.

    This method will need to be overridden in order for clients to
    do something when this event occurs.

    @param room: The room the subject was received from.
    @type room: L{Room}

    @param user: The occupant that set the subject, or C{None} if it
        came from the room itself.
    @type user: L{User}

    @param subject: The new subject text.
    """
    pass
| -6,649,055,416,230,628,000
|
A (new) room subject has been received.
This method will need to be modified in order for clients to
do something when this event occurs.
|
wokkel/muc.py
|
receivedSubject
|
Gandi/wokkel
|
python
|
def receivedSubject(self, room, user, subject):
'\n A (new) room subject has been received.\n\n This method will need to be modified inorder for clients to\n do something when this event occurs.\n '
pass
|
def receivedGroupChat(self, room, user, message):
    """
    A groupchat message was received.

    @param room: The room the message was received from.
    @type room: L{Room}

    @param user: The user that sent the message, or C{None} if it was a
        message from the room itself.
    @type user: L{User}

    @param message: The message.
    @type message: L{GroupChat}
    """
    pass
| 8,303,721,188,655,743,000
|
A groupchat message was received.
@param room: The room the message was received from.
@type room: L{Room}
@param user: The user that sent the message, or C{None} if it was a
message from the room itself.
@type user: L{User}
@param message: The message.
@type message: L{GroupChat}
|
wokkel/muc.py
|
receivedGroupChat
|
Gandi/wokkel
|
python
|
def receivedGroupChat(self, room, user, message):
'\n A groupchat message was received.\n\n @param room: The room the message was received from.\n @type room: L{Room}\n\n @param user: The user that sent the message, or C{None} if it was a\n message from the room itself.\n @type user: L{User}\n\n @param message: The message.\n @type message: L{GroupChat}\n '
pass
|
def receivedHistory(self, room, user, message):
    """
    A groupchat message from the room's discussion history was received.

    This is identical to L{receivedGroupChat}, with the delayed delivery
    information (timestamp and original sender) in C{message.delay}. For
    anonymous rooms, C{message.delay.sender} is the room's address.

    @param room: The room the message was received from.
    @type room: L{Room}

    @param user: The user that sent the message, or C{None} if it was a
        message from the room itself.
    @type user: L{User}

    @param message: The message.
    @type message: L{GroupChat}
    """
    pass
| -2,393,166,430,892,089,300
|
A groupchat message from the room's discussion history was received.
This is identical to L{receivedGroupChat}, with the delayed delivery
information (timestamp and original sender) in C{message.delay}. For
anonymous rooms, C{message.delay.sender} is the room's address.
@param room: The room the message was received from.
@type room: L{Room}
@param user: The user that sent the message, or C{None} if it was a
message from the room itself.
@type user: L{User}
@param message: The message.
@type message: L{GroupChat}
|
wokkel/muc.py
|
receivedHistory
|
Gandi/wokkel
|
python
|
def receivedHistory(self, room, user, message):
"\n A groupchat message from the room's discussion history was received.\n\n This is identical to L{receivedGroupChat}, with the delayed delivery\n information (timestamp and original sender) in C{message.delay}. For\n anonymous rooms, C{message.delay.sender} is the room's address.\n\n @param room: The room the message was received from.\n @type room: L{Room}\n\n @param user: The user that sent the message, or C{None} if it was a\n message from the room itself.\n @type user: L{User}\n\n @param message: The message.\n @type message: L{GroupChat}\n "
pass
|
def join(self, roomJID, nick, historyOptions=None, password=None):
    """
    Join a MUC room by sending presence to it.

    @param roomJID: The JID of the room the entity is joining.
    @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}

    @param nick: The nick name for the entity joining the room.
    @type nick: C{unicode}

    @param historyOptions: Options for conversation history sent by the
        room upon joining.
    @type historyOptions: L{HistoryOptions}

    @param password: Optional password for the room.
    @type password: C{unicode}

    @return: A deferred that fires with the room when the entity is in
        the room, or with a failure if an error has occurred.
    """
    room = Room(roomJID, nick)
    self._addRoom(room)

    def joined(presence):
        # The room-created status code signals a freshly created
        # ("locked") room that still awaits configuration.
        if STATUS_CODE.ROOM_CREATED in presence.mucStatuses:
            room.locked = True
        return room

    def failed(failure):
        # The join failed; forget the room we registered optimistically.
        self._removeRoom(roomJID)
        return failure

    d = MUCClientProtocol.join(self, roomJID, nick, historyOptions,
                               password)
    d.addCallbacks(joined, failed)
    return d
| -775,746,254,155,071,400
|
Join a MUC room by sending presence to it.
@param roomJID: The JID of the room the entity is joining.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param nick: The nick name for the entity joining the room.
@type nick: C{unicode}
@param historyOptions: Options for conversation history sent by the
room upon joining.
@type historyOptions: L{HistoryOptions}
@param password: Optional password for the room.
@type password: C{unicode}
@return: A deferred that fires with the room when the entity is in the
room, or with a failure if an error has occurred.
|
wokkel/muc.py
|
join
|
Gandi/wokkel
|
python
|
def join(self, roomJID, nick, historyOptions=None, password=None):
'\n Join a MUC room by sending presence to it.\n\n @param roomJID: The JID of the room the entity is joining.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The nick name for the entitity joining the room.\n @type nick: C{unicode}\n\n @param historyOptions: Options for conversation history sent by the\n room upon joining.\n @type historyOptions: L{HistoryOptions}\n\n @param password: Optional password for the room.\n @type password: C{unicode}\n\n @return: A deferred that fires with the room when the entity is in the\n room, or with a failure if an error has occurred.\n '
def cb(presence):
'\n We have presence that says we joined a room.\n '
if (STATUS_CODE.ROOM_CREATED in presence.mucStatuses):
room.locked = True
return room
def eb(failure):
self._removeRoom(roomJID)
return failure
room = Room(roomJID, nick)
self._addRoom(room)
d = MUCClientProtocol.join(self, roomJID, nick, historyOptions, password)
d.addCallbacks(cb, eb)
return d
|
def nick(self, roomJID, nick):
    """
    Change an entity's nick name in a MUC room.

    See: http://xmpp.org/extensions/xep-0045.html#changenick

    @param roomJID: The JID of the room, i.e. without a resource.
    @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}

    @param nick: The new nick name within the room.
    @type nick: C{unicode}
    """
    room = self._getRoom(roomJID)

    def changed(presence):
        # Only record the new nick once the service confirmed it.
        room.setNick(nick)
        return room

    d = MUCClientProtocol.nick(self, roomJID, nick)
    d.addCallback(changed)
    return d
| -6,502,918,311,450,282,000
|
Change an entity's nick name in a MUC room.
See: http://xmpp.org/extensions/xep-0045.html#changenick
@param roomJID: The JID of the room, i.e. without a resource.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param nick: The new nick name within the room.
@type nick: C{unicode}
|
wokkel/muc.py
|
nick
|
Gandi/wokkel
|
python
|
def nick(self, roomJID, nick):
"\n Change an entity's nick name in a MUC room.\n\n See: http://xmpp.org/extensions/xep-0045.html#changenick\n\n @param roomJID: The JID of the room, i.e. without a resource.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param nick: The new nick name within the room.\n @type nick: C{unicode}\n "
def cb(presence):
room.setNick(nick)
return room
room = self._getRoom(roomJID)
d = MUCClientProtocol.nick(self, roomJID, nick)
d.addCallback(cb)
return d
|
def leave(self, roomJID):
    """
    Leave a MUC room.

    See: http://xmpp.org/extensions/xep-0045.html#exit

    @param roomJID: The Room JID of the room to leave.
    @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
    """
    def left(presence):
        # Drop our local bookkeeping once the exit is acknowledged.
        self._removeRoom(roomJID)

    d = MUCClientProtocol.leave(self, roomJID)
    d.addCallback(left)
    return d
| 7,147,643,475,307,881,000
|
Leave a MUC room.
See: http://xmpp.org/extensions/xep-0045.html#exit
@param roomJID: The Room JID of the room to leave.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
leave
|
Gandi/wokkel
|
python
|
def leave(self, roomJID):
'\n Leave a MUC room.\n\n See: http://xmpp.org/extensions/xep-0045.html#exit\n\n @param roomJID: The Room JID of the room to leave.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n '
def cb(presence):
self._removeRoom(roomJID)
d = MUCClientProtocol.leave(self, roomJID)
d.addCallback(cb)
return d
|
def status(self, roomJID, show=None, status=None):
    """
    Change user status.

    See: http://xmpp.org/extensions/xep-0045.html#changepres

    @param roomJID: The Room JID of the room.
    @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}

    @param show: The availability of the entity. Common values are xa,
        available, etc.
    @type show: C{unicode}

    @param status: The current status of the entity.
    @type status: C{unicode}
    """
    room = self._getRoom(roomJID)
    d = MUCClientProtocol.status(self, roomJID, show, status)

    def done(result):
        return room

    d.addCallback(done)
    return d
| 4,186,971,232,257,511,000
|
Change user status.
See: http://xmpp.org/extensions/xep-0045.html#changepres
@param roomJID: The Room JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param show: The availability of the entity. Common values are xa,
available, etc
@type show: C{unicode}
@param status: The current status of the entity.
@type status: C{unicode}
|
wokkel/muc.py
|
status
|
Gandi/wokkel
|
python
|
def status(self, roomJID, show=None, status=None):
'\n Change user status.\n\n See: http://xmpp.org/extensions/xep-0045.html#changepres\n\n @param roomJID: The Room JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param show: The availability of the entity. Common values are xa,\n available, etc\n @type show: C{unicode}\n\n @param status: The current status of the entity.\n @type status: C{unicode}\n '
room = self._getRoom(roomJID)
d = MUCClientProtocol.status(self, roomJID, show, status)
d.addCallback((lambda _: room))
return d
|
def destroy(self, roomJID, reason=None, alternate=None, password=None):
    """
    Destroy a room.

    @param roomJID: The JID of the room.
    @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}

    @param reason: The reason for the destruction of the room.
    @type reason: C{unicode}

    @param alternate: The JID of the room suggested as an alternate venue.
    @type alternate: L{JID<twisted.words.protocols.jabber.jid.JID>}
    """
    # NOTE(review): ``password`` is accepted but never forwarded to
    # MUCClientProtocol.destroy — confirm whether that is intentional.
    def destroyed(result):
        self._removeRoom(roomJID)

    d = MUCClientProtocol.destroy(self, roomJID, reason, alternate)
    d.addCallback(destroyed)
    return d
| -1,991,528,344,491,774,500
|
Destroy a room.
@param roomJID: The JID of the room.
@type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}
@param reason: The reason for the destruction of the room.
@type reason: C{unicode}
@param alternate: The JID of the room suggested as an alternate venue.
@type alternate: L{JID<twisted.words.protocols.jabber.jid.JID>}
|
wokkel/muc.py
|
destroy
|
Gandi/wokkel
|
python
|
def destroy(self, roomJID, reason=None, alternate=None, password=None):
'\n Destroy a room.\n\n @param roomJID: The JID of the room.\n @type roomJID: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n @param reason: The reason for the destruction of the room.\n @type reason: C{unicode}\n\n @param alternate: The JID of the room suggested as an alternate venue.\n @type alternate: L{JID<twisted.words.protocols.jabber.jid.JID>}\n\n '
def destroyed(iq):
self._removeRoom(roomJID)
d = MUCClientProtocol.destroy(self, roomJID, reason, alternate)
d.addCallback(destroyed)
return d
|
def cb(presence):
    """
    We have presence that says we joined a room.

    Marks the closure-captured ``room`` as locked when the join
    presence carries the room-created status code.
    """
    if (STATUS_CODE.ROOM_CREATED in presence.mucStatuses):
        room.locked = True
    return room
| -8,467,959,678,290,151,000
|
We have presence that says we joined a room.
|
wokkel/muc.py
|
cb
|
Gandi/wokkel
|
python
|
def cb(presence):
'\n \n '
if (STATUS_CODE.ROOM_CREATED in presence.mucStatuses):
room.locked = True
return room
|
def testPing(self):
    """Test Ping.

    Placeholder test case; intentionally empty.
    """
    pass
| 4,234,929,287,827,278,000
|
Test Ping
|
test/test_ping.py
|
testPing
|
velopaymentsapi/velo-python
|
python
|
def testPing(self):
pass
|
def calc(self, t):
    """
    Calc position.

    Returns None when t lies outside the knot range self.x; otherwise
    evaluates the cubic polynomial of the segment containing t.
    """
    if t < self.x[0] or t > self.x[-1]:
        return None
    i = self.__search_index(t)
    dx = t - self.x[i]
    return self.a[i] + self.b[i] * dx + self.c[i] * dx ** 2.0 \
        + self.d[i] * dx ** 3.0
| 3,248,057,401,415,408,000
|
Calc position
if t is outside of the input x, return None
|
cubic_spline_planner.py
|
calc
|
hadleyhzy34/mpc_python_traj
|
python
|
def calc(self, t):
'\n Calc position\n if t is outside of the input x, return None\n '
if (t < self.x[0]):
return None
elif (t > self.x[(- 1)]):
return None
i = self.__search_index(t)
dx = (t - self.x[i])
result = (((self.a[i] + (self.b[i] * dx)) + (self.c[i] * (dx ** 2.0))) + (self.d[i] * (dx ** 3.0)))
return result
|
def calcd(self, t):
    """
    Calc first derivative.

    Returns None when t lies outside the knot range self.x.
    """
    if t < self.x[0] or t > self.x[-1]:
        return None
    i = self.__search_index(t)
    dx = t - self.x[i]
    return self.b[i] + 2.0 * self.c[i] * dx + 3.0 * self.d[i] * dx ** 2.0
| -103,370,416,844,136,660
|
Calc first derivative
if t is outside of the input x, return None
|
cubic_spline_planner.py
|
calcd
|
hadleyhzy34/mpc_python_traj
|
python
|
def calcd(self, t):
'\n Calc first derivative\n if t is outside of the input x, return None\n '
if (t < self.x[0]):
return None
elif (t > self.x[(- 1)]):
return None
i = self.__search_index(t)
dx = (t - self.x[i])
result = ((self.b[i] + ((2.0 * self.c[i]) * dx)) + ((3.0 * self.d[i]) * (dx ** 2.0)))
return result
|
def calcdd(self, t):
    """
    Calc second derivative.

    Returns None when t lies outside the knot range self.x.
    """
    if t < self.x[0] or t > self.x[-1]:
        return None
    i = self.__search_index(t)
    dx = t - self.x[i]
    return 2.0 * self.c[i] + 6.0 * self.d[i] * dx
| 8,793,203,312,076,528,000
|
Calc second derivative
|
cubic_spline_planner.py
|
calcdd
|
hadleyhzy34/mpc_python_traj
|
python
|
def calcdd(self, t):
'\n \n '
if (t < self.x[0]):
return None
elif (t > self.x[(- 1)]):
return None
i = self.__search_index(t)
dx = (t - self.x[i])
result = ((2.0 * self.c[i]) + ((6.0 * self.d[i]) * dx))
return result
|
def __search_index(self, x):
    """
    search data segment index

    Binary search: returns the largest i with self.x[i] <= x, i.e. the
    index of the segment containing x.
    """
    return (bisect.bisect(self.x, x) - 1)
| 3,739,817,014,440,759,000
|
search data segment index
|
cubic_spline_planner.py
|
__search_index
|
hadleyhzy34/mpc_python_traj
|
python
|
def __search_index(self, x):
'\n \n '
return (bisect.bisect(self.x, x) - 1)
|
def __calc_A(self, h):
    """
    calc matrix A for spline coefficient c

    Builds the tridiagonal system matrix from the knot spacings ``h``,
    with identity rows enforced at the first and last index.
    """
    nx = self.nx
    A = np.zeros((nx, nx))
    A[0, 0] = 1.0
    for i in range(nx - 1):
        if i != nx - 2:
            A[i + 1, i + 1] = 2.0 * (h[i] + h[i + 1])
        A[i + 1, i] = h[i]
        A[i, i + 1] = h[i]
    # Boundary rows: decouple the first and last unknowns.
    A[0, 1] = 0.0
    A[nx - 1, nx - 2] = 0.0
    A[nx - 1, nx - 1] = 1.0
    return A
| -9,205,206,347,078,354,000
|
calc matrix A for spline coefficient c
|
cubic_spline_planner.py
|
__calc_A
|
hadleyhzy34/mpc_python_traj
|
python
|
def __calc_A(self, h):
'\n \n '
A = np.zeros((self.nx, self.nx))
A[(0, 0)] = 1.0
for i in range((self.nx - 1)):
if (i != (self.nx - 2)):
A[((i + 1), (i + 1))] = (2.0 * (h[i] + h[(i + 1)]))
A[((i + 1), i)] = h[i]
A[(i, (i + 1))] = h[i]
A[(0, 1)] = 0.0
A[((self.nx - 1), (self.nx - 2))] = 0.0
A[((self.nx - 1), (self.nx - 1))] = 1.0
return A
|
def __calc_B(self, h):
    """
    calc matrix B for spline coefficient c

    Right-hand-side vector built from divided differences of ``self.a``
    over the knot spacings ``h``; first and last entries stay zero.
    """
    B = np.zeros(self.nx)
    for i in range(self.nx - 2):
        B[i + 1] = 3.0 * (self.a[i + 2] - self.a[i + 1]) / h[i + 1] \
            - 3.0 * (self.a[i + 1] - self.a[i]) / h[i]
    return B
| 903,250,203,395,355,500
|
calc matrix B for spline coefficient c
|
cubic_spline_planner.py
|
__calc_B
|
hadleyhzy34/mpc_python_traj
|
python
|
def __calc_B(self, h):
'\n \n '
B = np.zeros(self.nx)
for i in range((self.nx - 2)):
B[(i + 1)] = (((3.0 * (self.a[(i + 2)] - self.a[(i + 1)])) / h[(i + 1)]) - ((3.0 * (self.a[(i + 1)] - self.a[i])) / h[i]))
return B
|
def calc_position(self, s):
    """
    calc position

    Returns the (x, y) pair obtained by evaluating the component
    splines ``self.sx`` and ``self.sy`` at parameter s.
    """
    return (self.sx.calc(s), self.sy.calc(s))
| -4,300,209,088,292,946,400
|
calc position
|
cubic_spline_planner.py
|
calc_position
|
hadleyhzy34/mpc_python_traj
|
python
|
def calc_position(self, s):
'\n \n '
x = self.sx.calc(s)
y = self.sy.calc(s)
return (x, y)
|
def calc_curvature(self, s):
    """
    calc curvature

    Signed curvature from the first and second derivatives of the
    component splines at parameter s.
    """
    dx, dy = self.sx.calcd(s), self.sy.calcd(s)
    ddx, ddy = self.sx.calcdd(s), self.sy.calcdd(s)
    return (ddy * dx - ddx * dy) / (dx ** 2 + dy ** 2) ** (3 / 2)
| -8,597,725,787,627,640,000
|
calc curvature
|
cubic_spline_planner.py
|
calc_curvature
|
hadleyhzy34/mpc_python_traj
|
python
|
def calc_curvature(self, s):
'\n \n '
dx = self.sx.calcd(s)
ddx = self.sx.calcdd(s)
dy = self.sy.calcd(s)
ddy = self.sy.calcdd(s)
k = (((ddy * dx) - (ddx * dy)) / (((dx ** 2) + (dy ** 2)) ** (3 / 2)))
return k
|
def calc_yaw(self, s):
    """
    calc yaw

    Heading angle (radians) of the spline tangent at parameter s.
    """
    return math.atan2(self.sy.calcd(s), self.sx.calcd(s))
| 8,619,149,569,145,831,000
|
calc yaw
|
cubic_spline_planner.py
|
calc_yaw
|
hadleyhzy34/mpc_python_traj
|
python
|
def calc_yaw(self, s):
'\n \n '
dx = self.sx.calcd(s)
dy = self.sy.calcd(s)
yaw = math.atan2(dy, dx)
return yaw
|
def csce(args: Namespace):
    """Parse configuration options from Cobalt Strike Beacon.

    Tries each candidate Cobalt Strike version until one parses the
    beacon cleanly, then prints the resulting configuration as JSON to
    stdout.  If no version parses, an empty JSON object is printed.

    Returns 0 on completion, 1 when the source path is not a file.
    """
    if (not args.source.is_file()):
        logger.error('Source path does not exist or is not file')
        return 1
    # A user-supplied version restricts the search to that one version.
    if args.cs_version:
        version_list = [args.cs_version]
    else:
        version_list = list(CobaltStrikeConfigParser.SUPPORTED_VERSIONS)
    config: Dict[(str, Any)] = dict()
    for version in version_list:
        with CobaltStrikeConfigParser(args.source, version) as parser:
            try:
                config = parser.parse_config()
                break  # first version that parses wins
            except CobaltStrikeError:
                pass  # wrong version guess; try the next one
    print(json.dumps(config, indent=(2 if args.pretty else None), cls=JSONEncoderWithBinarySupport))
    return 0
| -3,054,200,988,728,596,000
|
Parse configuration options from Cobalt Strike Beacon.
|
libcsce/bin/csce.py
|
csce
|
strozfriedberg/cobaltstrike-config-extractor
|
python
|
def csce(args: Namespace):
if (not args.source.is_file()):
logger.error('Source path does not exist or is not file')
return 1
if args.cs_version:
version_list = [args.cs_version]
else:
version_list = list(CobaltStrikeConfigParser.SUPPORTED_VERSIONS)
config: Dict[(str, Any)] = dict()
for version in version_list:
with CobaltStrikeConfigParser(args.source, version) as parser:
try:
config = parser.parse_config()
break
except CobaltStrikeError:
pass
print(json.dumps(config, indent=(2 if args.pretty else None), cls=JSONEncoderWithBinarySupport))
return 0
|
def create_skbuild_argparser():
    """Create and return a scikit-build argument parser.

    The parser has ``add_help=False`` so it can be layered on top of
    other parsers via ``parse_known_args``.
    """
    parser = argparse.ArgumentParser(add_help=False)
    # metavar='' suppresses the value placeholder in generated help text.
    parser.add_argument(
        '--build-type', default='Release', metavar='',
        help='specify the CMake build type (e.g. Debug or Release)')
    parser.add_argument(
        '-G', '--generator', metavar='',
        help='specify the CMake build system generator')
    parser.add_argument(
        '-j', metavar='N', type=int, dest='jobs',
        help='allow N build jobs at once')
    parser.add_argument(
        '--cmake-executable', default=None, metavar='',
        help='specify the path to the cmake executable')
    parser.add_argument(
        '--install-target', default=None, metavar='',
        help='specify the CMake target performing the install. If not provided, uses the target ``install``')
    parser.add_argument(
        '--skip-generator-test', action='store_true',
        help='skip generator test when a generator is explicitly selected using --generator')
    return parser
| -6,549,896,405,690,057,000
|
Create and return a scikit-build argument parser.
|
skbuild/setuptools_wrap.py
|
create_skbuild_argparser
|
pekkarr/scikit-build
|
python
|
def create_skbuild_argparser():
    """Create and return a scikit-build argument parser.

    The parser has ``add_help=False`` so it can be layered on top of
    other parsers via ``parse_known_args``.
    """
    parser = argparse.ArgumentParser(add_help=False)
    # Fix: the bare ``metavar=`` forms below were syntactically invalid
    # (a stripped empty string); restore ``metavar=''``, which hides the
    # value placeholder in the generated help text.
    parser.add_argument('--build-type', default='Release', metavar='', help='specify the CMake build type (e.g. Debug or Release)')
    parser.add_argument('-G', '--generator', metavar='', help='specify the CMake build system generator')
    parser.add_argument('-j', metavar='N', type=int, dest='jobs', help='allow N build jobs at once')
    parser.add_argument('--cmake-executable', default=None, metavar='', help='specify the path to the cmake executable')
    parser.add_argument('--install-target', default=None, metavar='', help='specify the CMake target performing the install. If not provided, uses the target ``install``')
    parser.add_argument('--skip-generator-test', action='store_true', help='skip generator test when a generator is explicitly selected using --generator')
    return parser
|
def _is_cmake_configure_argument(arg):
    """Return True if ``arg`` is a relevant argument to pass to cmake when configuring a project."""
    # -C (cache preload) and -D (cache define) are configure-time flags.
    return arg.startswith(('-C', '-D'))
| -3,802,505,428,589,892,000
|
Return True if ``arg`` is a relevant argument to pass to cmake when configuring a project.
|
skbuild/setuptools_wrap.py
|
_is_cmake_configure_argument
|
pekkarr/scikit-build
|
python
|
def _is_cmake_configure_argument(arg):
for cmake_arg in ('-C', '-D'):
if arg.startswith(cmake_arg):
return True
return False
|
def parse_skbuild_args(args, cmake_args, build_tool_args):
    """
    Parse arguments in the scikit-build argument set.  Convert specified
    arguments to proper format and append to cmake_args and
    build_tool_args.  Returns the tuple ``(remaining arguments, cmake
    executable, skip_generator_test)``.
    """
    parser = create_skbuild_argparser()

    # -C/-D options are forwarded verbatim to the cmake configure step.
    cmake_args.extend(a for a in args if _is_cmake_configure_argument(a))
    other_args = [a for a in args if not _is_cmake_configure_argument(a)]

    namespace, remaining_args = parser.parse_known_args(other_args)

    cmake_args.append('-DCMAKE_BUILD_TYPE:STRING=' + namespace.build_type)
    if namespace.generator is not None:
        cmake_args.extend(['-G', namespace.generator])
    build_tool_args.extend(['--config', namespace.build_type])
    if namespace.jobs is not None:
        build_tool_args.extend(['-j', str(namespace.jobs)])
    if namespace.install_target is not None:
        build_tool_args.extend(['--install-target', namespace.install_target])
    if namespace.generator is None and namespace.skip_generator_test is True:
        sys.exit('ERROR: Specifying --skip-generator-test requires --generator to also be specified.')
    return (remaining_args, namespace.cmake_executable, namespace.skip_generator_test)
| 3,962,547,829,053,957,000
|
Parse arguments in the scikit-build argument set. Convert specified
arguments to proper format and append to cmake_args and build_tool_args.
Returns the tuple ``(remaining arguments, cmake executable, skip_generator_test)``.
|
skbuild/setuptools_wrap.py
|
parse_skbuild_args
|
pekkarr/scikit-build
|
python
|
def parse_skbuild_args(args, cmake_args, build_tool_args):
'\n Parse arguments in the scikit-build argument set. Convert specified\n arguments to proper format and append to cmake_args and build_tool_args.\n Returns the tuple ``(remaining arguments, cmake executable, skip_generator_test)``.\n '
parser = create_skbuild_argparser()
cmake_args.extend([arg for arg in args if _is_cmake_configure_argument(arg)])
args = [arg for arg in args if (not _is_cmake_configure_argument(arg))]
(namespace, remaining_args) = parser.parse_known_args(args)
cmake_args.append(('-DCMAKE_BUILD_TYPE:STRING=' + namespace.build_type))
if (namespace.generator is not None):
cmake_args.extend(['-G', namespace.generator])
build_tool_args.extend(['--config', namespace.build_type])
if (namespace.jobs is not None):
build_tool_args.extend(['-j', str(namespace.jobs)])
if (namespace.install_target is not None):
build_tool_args.extend(['--install-target', namespace.install_target])
if ((namespace.generator is None) and (namespace.skip_generator_test is True)):
sys.exit('ERROR: Specifying --skip-generator-test requires --generator to also be specified.')
return (remaining_args, namespace.cmake_executable, namespace.skip_generator_test)
|
def parse_args():
    """This function parses the command-line arguments ``sys.argv`` and returns
    the tuple ``(setuptools_args, cmake_executable, skip_generator_test, cmake_args, build_tool_args)``
    where each ``*_args`` element corresponds to a set of arguments separated by ``--``."""
    setuptools_args = []
    cmake = []
    make = []
    argsets = [setuptools_args, cmake, make]
    index = 0
    separator = '--'
    # Each "--" on the command line switches collection to the next set.
    for arg in sys.argv:
        if arg != separator:
            argsets[index].append(arg)
            continue
        index += 1
        if index >= len(argsets):
            sys.exit('ERROR: Too many "{}" separators provided (expected at most {}).'.format(separator, len(argsets) - 1))
    setuptools_args, cmake_executable, skip_generator_test = parse_skbuild_args(setuptools_args, cmake, make)
    return (setuptools_args, cmake_executable, skip_generator_test, cmake, make)
| 8,402,786,183,257,945,000
|
This function parses the command-line arguments ``sys.argv`` and returns
the tuple ``(setuptools_args, cmake_executable, skip_generator_test, cmake_args, build_tool_args)``
where each ``*_args`` element corresponds to a set of arguments separated by ``--``.
|
skbuild/setuptools_wrap.py
|
parse_args
|
pekkarr/scikit-build
|
python
|
def parse_args():
'This function parses the command-line arguments ``sys.argv`` and returns\n the tuple ``(setuptools_args, cmake_executable, skip_generator_test, cmake_args, build_tool_args)``\n where each ``*_args`` element corresponds to a set of arguments separated by ``--``.'
dutils = []
cmake = []
make = []
argsets = [dutils, cmake, make]
i = 0
separator = '--'
for arg in sys.argv:
if (arg == separator):
i += 1
if (i >= len(argsets)):
sys.exit('ERROR: Too many "{}" separators provided (expected at most {}).'.format(separator, (len(argsets) - 1)))
else:
argsets[i].append(arg)
(dutils, cmake_executable, skip_generator_test) = parse_skbuild_args(dutils, cmake, make)
return (dutils, cmake_executable, skip_generator_test, cmake, make)
|
def _parse_setuptools_arguments(setup_attrs):
    "This function instantiates a Distribution object and\n    parses the command line arguments.\n\n    It returns the tuple ``(display_only, help_commands, commands, hide_listing, force_cmake, skip_cmake, plat_name)``\n    where\n\n    - display_only is a boolean indicating if an argument like '--help',\n    '--help-commands' or '--author' was passed.\n    - help_commands is a boolean indicating if argument '--help-commands'\n    was passed.\n    - commands contains the list of commands that were passed.\n    - hide_listing is a boolean indicating if the list of files being included\n    in the distribution is displayed or not.\n    - force_cmake a boolean indicating that CMake should always be executed.\n    - skip_cmake is a boolean indicating if the execution of CMake should\n    explicitly be skipped.\n    - plat_name is a string identifying the platform name to embed in generated\n    filenames. It defaults to :func:`skbuild.constants.skbuild_plat_name()`.\n    - build_ext_inplace is a boolean indicating if ``build_ext`` command was\n    specified along with the --inplace argument.\n\n    Otherwise it raises DistutilsArgError exception if there are\n    any error on the command-line, and it raises DistutilsGetoptError\n    if there any error in the command 'options' attribute.\n\n    The code has been adapted from the setup() function available\n    in distutils/core.py.\n    "
    # Work on a copy so the caller's attribute dict is not mutated.
    setup_attrs = dict(setup_attrs)
    setup_attrs['script_name'] = os.path.basename(sys.argv[0])
    dist = upstream_Distribution(setup_attrs)
    # Register the scikit-build specific flags so parse_command_line()
    # accepts them.
    # NOTE(review): this extends the *class* attribute, so the options
    # accumulate if this function is called more than once per process —
    # presumably it runs only once; confirm before reusing.
    upstream_Distribution.global_options.extend([('hide-listing', None, 'do not display list of files being included in the distribution'), ('force-cmake', None, 'always run CMake'), ('skip-cmake', None, 'do not run CMake')])
    dist.parse_config_files()
    # Silence any output produced while the command line is parsed.
    with _capture_output():
        result = dist.parse_command_line()
        # parse_command_line() returns a false value when only
        # informational options (e.g. --help, --author) were given.
        display_only = (not result)
        # Default the custom flags when they were not passed on the CLI.
        if (not hasattr(dist, 'hide_listing')):
            dist.hide_listing = False
        if (not hasattr(dist, 'force_cmake')):
            dist.force_cmake = False
        if (not hasattr(dist, 'skip_cmake')):
            dist.skip_cmake = False
        # Collect --plat-name from every requested command; the value must
        # agree across commands or be absent everywhere.
        plat_names = set()
        for cmd in [dist.get_command_obj(command) for command in dist.commands]:
            if (getattr(cmd, 'plat_name', None) is not None):
                plat_names.add(cmd.plat_name)
        if (not plat_names):
            plat_names.add(None)
        elif (len(plat_names) > 1):
            raise SKBuildError(('--plat-name is ambiguous: %s' % ', '.join(plat_names)))
        plat_name = list(plat_names)[0]
        build_ext_inplace = dist.get_command_obj('build_ext').inplace
        return (display_only, dist.help_commands, dist.commands, dist.hide_listing, dist.force_cmake, dist.skip_cmake, plat_name, build_ext_inplace)
| -3,338,949,724,349,137,400
|
This function instantiates a Distribution object and
parses the command line arguments.
It returns the tuple ``(display_only, help_commands, commands, hide_listing, force_cmake, skip_cmake, plat_name)``
where
- display_only is a boolean indicating if an argument like '--help',
'--help-commands' or '--author' was passed.
- help_commands is a boolean indicating if argument '--help-commands'
was passed.
- commands contains the list of commands that were passed.
- hide_listing is a boolean indicating if the list of files being included
in the distribution is displayed or not.
- force_cmake is a boolean indicating that CMake should always be executed.
- skip_cmake is a boolean indicating if the execution of CMake should
explicitly be skipped.
- plat_name is a string identifying the platform name to embed in generated
filenames. It defaults to :func:`skbuild.constants.skbuild_plat_name()`.
- build_ext_inplace is a boolean indicating if ``build_ext`` command was
specified along with the --inplace argument.
Otherwise it raises a DistutilsArgError exception if there are
any errors on the command line, and it raises DistutilsGetoptError
if there is any error in the command 'options' attribute.
The code has been adapted from the setup() function available
in distutils/core.py.
|
skbuild/setuptools_wrap.py
|
_parse_setuptools_arguments
|
pekkarr/scikit-build
|
python
|
def _parse_setuptools_arguments(setup_attrs):
    "This function instantiates a Distribution object and\n    parses the command line arguments.\n\n    It returns the tuple ``(display_only, help_commands, commands, hide_listing, force_cmake, skip_cmake, plat_name)``\n    where\n\n    - display_only is a boolean indicating if an argument like '--help',\n    '--help-commands' or '--author' was passed.\n    - help_commands is a boolean indicating if argument '--help-commands'\n    was passed.\n    - commands contains the list of commands that were passed.\n    - hide_listing is a boolean indicating if the list of files being included\n    in the distribution is displayed or not.\n    - force_cmake a boolean indicating that CMake should always be executed.\n    - skip_cmake is a boolean indicating if the execution of CMake should\n    explicitly be skipped.\n    - plat_name is a string identifying the platform name to embed in generated\n    filenames. It defaults to :func:`skbuild.constants.skbuild_plat_name()`.\n    - build_ext_inplace is a boolean indicating if ``build_ext`` command was\n    specified along with the --inplace argument.\n\n    Otherwise it raises DistutilsArgError exception if there are\n    any error on the command-line, and it raises DistutilsGetoptError\n    if there any error in the command 'options' attribute.\n\n    The code has been adapted from the setup() function available\n    in distutils/core.py.\n    "
    # Copy so the caller's dict is not mutated by the script_name insertion.
    setup_attrs = dict(setup_attrs)
    setup_attrs['script_name'] = os.path.basename(sys.argv[0])
    dist = upstream_Distribution(setup_attrs)
    # Register the scikit-build specific flags with the distutils
    # command-line parser (class-level, so they persist process-wide).
    upstream_Distribution.global_options.extend([('hide-listing', None, 'do not display list of files being included in the distribution'), ('force-cmake', None, 'always run CMake'), ('skip-cmake', None, 'do not run CMake')])
    dist.parse_config_files()
    # Suppress any output emitted during command-line parsing.
    with _capture_output():
        result = dist.parse_command_line()
        # A false result means an informational option ended processing.
        display_only = (not result)
        # Flags not passed on the CLI default to False.
        if (not hasattr(dist, 'hide_listing')):
            dist.hide_listing = False
        if (not hasattr(dist, 'force_cmake')):
            dist.force_cmake = False
        if (not hasattr(dist, 'skip_cmake')):
            dist.skip_cmake = False
        # --plat-name must be unambiguous across all requested commands.
        plat_names = set()
        for cmd in [dist.get_command_obj(command) for command in dist.commands]:
            if (getattr(cmd, 'plat_name', None) is not None):
                plat_names.add(cmd.plat_name)
        if (not plat_names):
            plat_names.add(None)
        elif (len(plat_names) > 1):
            raise SKBuildError(('--plat-name is ambiguous: %s' % ', '.join(plat_names)))
        plat_name = list(plat_names)[0]
        build_ext_inplace = dist.get_command_obj('build_ext').inplace
        return (display_only, dist.help_commands, dist.commands, dist.hide_listing, dist.force_cmake, dist.skip_cmake, plat_name, build_ext_inplace)
|
def strip_package(package_parts, module_file):
    """Strip the leading package path from *module_file*.

    Given ``package_parts`` (e.g. ``['foo', 'bar']``) and a ``module_file``
    (e.g. ``foo/bar/jaz/rock/roll.py``), remove, from the left, the portion
    of the path matching the package parts and return the remainder
    (e.g. ``jaz/rock/roll.py``).

    Works for either Windows or Unix-style ``module_file``, independently
    of the platform it runs on.
    """
    # Nothing to strip for an empty package or an absolute module path.
    if not package_parts or os.path.isabs(module_file):
        return module_file
    package = '/'.join(package_parts)
    # Normalize separators to '/' and keep only the leading portion of the
    # directory part that could match the package prefix.
    candidate = os.path.dirname(module_file.replace('\\', '/'))[:len(package)]
    if package != '' and candidate.startswith(package):
        # +1 skips the path separator following the package prefix.
        return module_file[len(package) + 1:]
    return module_file
| 5,353,424,950,498,382,000
|
Given ``package_parts`` (e.g. ``['foo', 'bar']``) and a
``module_file`` (e.g. ``foo/bar/jaz/rock/roll.py``), starting
from the left, this function will strip the parts of the path
matching the package parts and return a new string
(e.g ``jaz/rock/roll.py``).
The function will work as expected for either Windows or Unix-style
``module_file``, independently of the platform it runs on.
|
skbuild/setuptools_wrap.py
|
strip_package
|
pekkarr/scikit-build
|
python
|
def strip_package(package_parts, module_file):
    "Given ``package_parts`` (e.g. ``['foo', 'bar']``) and a\n    ``module_file`` (e.g. ``foo/bar/jaz/rock/roll.py``), starting\n    from the left, this function will strip the parts of the path\n    matching the package parts and return a new string\n    (e.g ``jaz/rock/roll.py``).\n\n    The function will work as expected for either Windows or Unix-style\n    ``module_file`` and this independently of the platform.\n    "
    # Nothing to strip for an empty package or an absolute module path.
    if ((not package_parts) or os.path.isabs(module_file)):
        return module_file
    package = '/'.join(package_parts)
    # Normalize to forward slashes so Windows-style input also matches.
    module_dir = os.path.dirname(module_file.replace('\\', '/'))
    module_dir = module_dir[:len(package)]
    # FIX: the empty-string literal had been dropped during extraction,
    # leaving the syntax error ``(package != )``; restore ``''``.
    # The +1 skips the path separator following the package prefix.
    return (module_file[(len(package) + 1):] if ((package != '') and module_dir.startswith(package)) else module_file)
|
def _package_data_contain_module(module, package_data):
    """Return True if *module* is listed in *package_data*.

    *module* is a tuple of the form ``(package, modulename, module_file)``.
    """
    package, _, module_file = module
    try:
        data_files = package_data[package]
    except KeyError:
        # The package has no package_data entry at all.
        return False
    # package_data entries are expressed relative to the package directory,
    # so compare against the package-relative path of the module file.
    return strip_package(package.split('.'), module_file) in data_files
| -4,050,379,927,419,049,500
|
Return True if the ``module`` is contained
in the ``package_data``.
``module`` is a tuple of the form
``(package, modulename, module_file)``.
|
skbuild/setuptools_wrap.py
|
_package_data_contain_module
|
pekkarr/scikit-build
|
python
|
def _package_data_contain_module(module, package_data):
    'Return True if the ``module`` is contained\n    in the ``package_data``.\n\n    ``module`` is a tuple of the form\n    ``(package, modulename, module_file)``.\n    '
    (package, _, module_file) = module
    # A package without a package_data entry cannot contain the module.
    if (package not in package_data):
        return False
    # package_data entries are relative to the package directory, so the
    # module file is compared using its package-relative path.
    if (strip_package(package.split('.'), module_file) in package_data[package]):
        return True
    return False
|
def _should_run_cmake(commands, cmake_with_sdist):
'Return True if at least one command requiring ``cmake`` to run\n is found in ``commands``.'
for expected_command in ['build', 'build_ext', 'develop', 'install', 'install_lib', 'bdist', 'bdist_dumb', 'bdist_egg', 'bdist_rpm', 'bdist_wininst', 'bdist_wheel', 'test']:
if (expected_command in commands):
return True
if (('sdist' in commands) and cmake_with_sdist):
return True
return False
| -6,954,401,875,759,451,000
|
Return True if at least one command requiring ``cmake`` to run
is found in ``commands``.
|
skbuild/setuptools_wrap.py
|
_should_run_cmake
|
pekkarr/scikit-build
|
python
|
def _should_run_cmake(commands, cmake_with_sdist):
    'Return True if at least one command requiring ``cmake`` to run\n    is found in ``commands``.'
    # Any build/install/dist-style command implies the CMake build step.
    for expected_command in ['build', 'build_ext', 'develop', 'install', 'install_lib', 'bdist', 'bdist_dumb', 'bdist_egg', 'bdist_rpm', 'bdist_wininst', 'bdist_wheel', 'test']:
        if (expected_command in commands):
            return True
    # 'sdist' triggers CMake only when explicitly requested via the
    # cmake_with_sdist option.
    if (('sdist' in commands) and cmake_with_sdist):
        return True
    return False
|
def _save_cmake_spec(args):
    'Save the CMake spec to disk'
    # Create the parent directory if needed; EAFP because it usually
    # already exists on re-runs.
    try:
        os.makedirs(os.path.dirname(CMAKE_SPEC_FILE()))
    except OSError:
        # Directory already exists (or cannot be created); open() below
        # will surface any genuinely unusable path.
        pass
    # Persist the spec as JSON so _load_cmake_spec() can compare it on the
    # next run.
    with open(CMAKE_SPEC_FILE(), 'w+') as fp:
        json.dump(args, fp)
| -7,583,062,114,144,099,000
|
Save the CMake spec to disk
|
skbuild/setuptools_wrap.py
|
_save_cmake_spec
|
pekkarr/scikit-build
|
python
|
def _save_cmake_spec(args):
    """Serialize *args* (the CMake spec) as JSON to ``CMAKE_SPEC_FILE()``."""
    # Create the parent directory if needed; EAFP because it usually
    # already exists on re-runs.
    try:
        os.makedirs(os.path.dirname(CMAKE_SPEC_FILE()))
    except OSError:
        # Directory already exists (or cannot be created); open() below
        # will surface any genuinely unusable path.
        pass
    with open(CMAKE_SPEC_FILE(), 'w+') as fp:
        json.dump(args, fp)
|
def _load_cmake_spec():
    'Load and return the CMake spec from disk'
    # Returns None when the spec file is missing, unreadable or not valid
    # JSON — callers treat that as "no previous configuration".
    try:
        with open(CMAKE_SPEC_FILE()) as fp:
            return json.load(fp)
    except (OSError, IOError, ValueError):
        return None
| 1,013,507,571,363,470,100
|
Load and return the CMake spec from disk
|
skbuild/setuptools_wrap.py
|
_load_cmake_spec
|
pekkarr/scikit-build
|
python
|
def _load_cmake_spec():
    """Read and return the CMake spec from ``CMAKE_SPEC_FILE()``, or None."""
    try:
        with open(CMAKE_SPEC_FILE()) as fp:
            return json.load(fp)
    except (OSError, IOError, ValueError):
        # Missing, unreadable or corrupt spec file -> treat as "no spec".
        return None
|
def setup(*args, **kw):
    'This function wraps setup() so that we can run cmake, make,\n    CMake build, then proceed as usual with setuptools, appending the\n    CMake-generated output as necessary.\n\n    The CMake project is re-configured only if needed. This is achieved by (1) retrieving the environment mapping\n    associated with the generator set in the ``CMakeCache.txt`` file, (2) saving the CMake configure arguments and\n    version in :func:`skbuild.constants.CMAKE_SPEC_FILE()`: and (3) re-configuring only if either the generator or\n    the CMake specs change.\n    '
    # setuptools does not accept a trailing slash in package_dir entries:
    # strip it and warn the caller via FutureWarning.
    if ('package_dir' in kw):
        for (package, prefix) in kw['package_dir'].items():
            if prefix.endswith('/'):
                msg = 'package_dir={{{!r}: {!r}}} ends with a trailing slash, which is not supported by setuptools.'.format(package, prefix)
                warnings.warn(msg, FutureWarning, stacklevel=2)
                kw['package_dir'][package] = prefix[:(- 1)]
    # Split sys.argv on '--' into setuptools / CMake / build-tool groups and
    # pull out the scikit-build specific options.
    (sys.argv, cmake_executable, skip_generator_test, cmake_args, make_args) = parse_args()
    # Substitute the scikit-build command implementations for the default
    # setuptools/distutils ones, unless the caller provided an override.
    cmdclass = kw.get('cmdclass', {})
    cmdclass['build'] = cmdclass.get('build', build.build)
    cmdclass['build_py'] = cmdclass.get('build_py', build_py.build_py)
    cmdclass['build_ext'] = cmdclass.get('build_ext', build_ext.build_ext)
    cmdclass['install'] = cmdclass.get('install', install.install)
    cmdclass['install_lib'] = cmdclass.get('install_lib', install_lib.install_lib)
    cmdclass['install_scripts'] = cmdclass.get('install_scripts', install_scripts.install_scripts)
    cmdclass['clean'] = cmdclass.get('clean', clean.clean)
    cmdclass['sdist'] = cmdclass.get('sdist', sdist.sdist)
    cmdclass['bdist'] = cmdclass.get('bdist', bdist.bdist)
    cmdclass['bdist_wheel'] = cmdclass.get('bdist_wheel', bdist_wheel.bdist_wheel)
    cmdclass['egg_info'] = cmdclass.get('egg_info', egg_info.egg_info)
    cmdclass['generate_source_manifest'] = cmdclass.get('generate_source_manifest', generate_source_manifest.generate_source_manifest)
    cmdclass['test'] = cmdclass.get('test', test.test)
    kw['cmdclass'] = cmdclass
    # Pop the scikit-build specific keyword arguments out of kw (applying
    # these defaults) so plain setuptools.setup() never sees them.
    parameters = {'cmake_args': [], 'cmake_install_dir': '', 'cmake_source_dir': '', 'cmake_with_sdist': False, 'cmake_languages': ('C', 'CXX'), 'cmake_minimum_required_version': None, 'cmake_process_manifest_hook': None, 'cmake_install_target': 'install'}
    skbuild_kw = {param: kw.pop(param, value) for (param, value) in parameters.items()}
    try:
        _check_skbuild_parameters(skbuild_kw)
    except SKBuildError as ex:
        import traceback
        # Print the traceback manually so the final message stays readable,
        # then exit using the error itself as the status message.
        print('Traceback (most recent call last):')
        traceback.print_tb(sys.exc_info()[2])
        print('')
        sys.exit(ex)
    # Normalize the CMake source directory: '.' means the project root and
    # absolute paths become project-relative.
    cmake_source_dir = skbuild_kw['cmake_source_dir']
    if (cmake_source_dir == '.'):
        cmake_source_dir = ''
    if os.path.isabs(cmake_source_dir):
        cmake_source_dir = os.path.relpath(cmake_source_dir)
    display_only = has_invalid_arguments = help_commands = False
    force_cmake = skip_cmake = False
    commands = []
    try:
        (display_only, help_commands, commands, hide_listing, force_cmake, skip_cmake, plat_name, build_ext_inplace) = _parse_setuptools_arguments(kw)
    except (DistutilsArgError, DistutilsGetoptError):
        # Invalid arguments: fall through and let setuptools report them.
        has_invalid_arguments = True
    has_cmakelists = os.path.exists(os.path.join(cmake_source_dir, 'CMakeLists.txt'))
    if (not has_cmakelists):
        print('skipping skbuild (no CMakeLists.txt found)')
    # Skip the CMake machinery for display-only invocations, invalid
    # arguments, commands that do not build anything, or projects without a
    # CMakeLists.txt — unless --force-cmake was given.
    skip_skbuild = (display_only or has_invalid_arguments or (not _should_run_cmake(commands, skbuild_kw['cmake_with_sdist'])) or (not has_cmakelists))
    if (skip_skbuild and (not force_cmake)):
        if help_commands:
            # Augment --help-commands output with the scikit-build options.
            skbuild_parser = create_skbuild_argparser()
            arg_descriptions = [line for line in skbuild_parser.format_help().split('\n') if line.startswith('  ')]
            print('scikit-build options:')
            print('\n'.join(arg_descriptions))
            print('')
            print('Arguments following a "--" are passed directly to CMake (e.g. -DMY_VAR:BOOL=TRUE).')
            print('Arguments following a second "--" are passed directly to  the build tool.')
            print('')
        # Plain setuptools path: no CMake involved.
        return setuptools.setup(*args, **kw)
    # 'develop'/'test' (and build_ext --inplace) operate against the source
    # tree instead of the consolidated CMake install directory.
    developer_mode = (('develop' in commands) or ('test' in commands) or build_ext_inplace)
    packages = kw.get('packages', [])
    package_dir = kw.get('package_dir', {})
    # Deep-copied because classification below mutates it.
    package_data = copy.deepcopy(kw.get('package_data', {}))
    py_modules = kw.get('py_modules', [])
    new_py_modules = {py_module: False for py_module in py_modules}
    scripts = kw.get('scripts', [])
    new_scripts = {script: False for script in scripts}
    data_files = {(parent_dir or '.'): set(file_list) for (parent_dir, file_list) in kw.get('data_files', [])}
    # Command-line CMake arguments come after the ones declared in setup().
    cmake_args = (skbuild_kw['cmake_args'] + cmake_args)
    # The install target can be overridden on the command line via
    # --install-target and takes precedence over the setup() keyword.
    cmake_install_target_from_command = next((make_args[(index + 1)] for (index, item) in enumerate(make_args) if (item == '--install-target')), '')
    cmake_install_target_from_setup = skbuild_kw['cmake_install_target']
    if cmake_install_target_from_command:
        cmake_install_target = cmake_install_target_from_command
    else:
        cmake_install_target = cmake_install_target_from_setup
    # Honor the CMAKE_ARGS environment variable, except for any
    # CMAKE_INSTALL_PREFIX entry, which scikit-build controls itself.
    env_cmake_args = (os.environ['CMAKE_ARGS'].split() if ('CMAKE_ARGS' in os.environ) else [])
    env_cmake_args = [s for s in env_cmake_args if ('CMAKE_INSTALL_PREFIX' not in s)]
    cmake_args = (env_cmake_args + cmake_args)
    if (sys.platform == 'darwin'):
        # macOS: derive the wheel platform tag (deployment target and
        # architectures) from --plat-name / CMake cache args, then propagate
        # the chosen values back into the CMake arguments.
        if (plat_name is None):
            plat_name = skbuild_plat_name()
        (_, version, machine) = plat_name.split('-')
        for cmake_arg in cmake_args:
            if ('CMAKE_OSX_DEPLOYMENT_TARGET' in cmake_arg):
                version = cmake_arg.split('=')[1]
            if ('CMAKE_OSX_ARCHITECTURES' in cmake_arg):
                machine = cmake_arg.split('=')[1]
                if (set(machine.split(';')) == {'x86_64', 'arm64'}):
                    machine = 'universal2'
        set_skbuild_plat_name('macosx-{}-{}'.format(version, machine))
        os.environ.setdefault('_PYTHON_HOST_PLATFORM', skbuild_plat_name())
        (_, version, machine) = skbuild_plat_name().split('-')
        if (not cmaker.has_cmake_cache_arg(cmake_args, 'CMAKE_OSX_DEPLOYMENT_TARGET')):
            cmake_args.append(('-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=%s' % version))
        if (not cmaker.has_cmake_cache_arg(cmake_args, 'CMAKE_OSX_ARCHITECTURES')):
            machine_archs = ('x86_64;arm64' if (machine == 'universal2') else machine)
            cmake_args.append(('-DCMAKE_OSX_ARCHITECTURES:STRING=%s' % machine_archs))
    # When the 'cmake' PyPI package is listed in setup_requires, fetch it
    # and use its bundled cmake/cpack/ctest binaries (marked executable).
    for package in kw.get('setup_requires', []):
        if (Requirement(package).name == 'cmake'):
            setup_requires = [package]
            dist = upstream_Distribution({'setup_requires': setup_requires})
            dist.fetch_build_eggs(setup_requires)
            import cmake
            for executable in ['cmake', 'cpack', 'ctest']:
                executable = os.path.join(cmake.CMAKE_BIN_DIR, executable)
                if (platform.system().lower() == 'windows'):
                    executable += '.exe'
                # The fetched binaries may lack the executable bit; add it
                # for user, group and others.
                st = os.stat(executable)
                permissions = (((st.st_mode | stat.S_IXUSR) | stat.S_IXGRP) | stat.S_IXOTH)
                os.chmod(executable, permissions)
            cmake_executable = os.path.join(cmake.CMAKE_BIN_DIR, 'cmake')
            break
    cmake_languages = skbuild_kw['cmake_languages']
    try:
        if (cmake_executable is None):
            cmake_executable = CMAKE_DEFAULT_EXECUTABLE
        cmkr = cmaker.CMaker(cmake_executable)
        if (not skip_cmake):
            # Enforce the optional minimum CMake version requirement.
            cmake_minimum_required_version = skbuild_kw['cmake_minimum_required_version']
            if (cmake_minimum_required_version is not None):
                if (parse_version(cmkr.cmake_version) < parse_version(cmake_minimum_required_version)):
                    raise SKBuildError('CMake version {} or higher is required. CMake version {} is being used'.format(cmake_minimum_required_version, cmkr.cmake_version))
            # Re-configure only when the cached generator environment is
            # missing or the saved spec (args/version/environment) changed.
            cmake_spec = {'args': ([which(CMAKE_DEFAULT_EXECUTABLE)] + cmake_args), 'version': cmkr.cmake_version, 'environment': {'PYTHONNOUSERSITE': os.environ.get('PYTHONNOUSERSITE'), 'PYTHONPATH': os.environ.get('PYTHONPATH')}}
            env = cmkr.get_cached_generator_env()
            if ((env is None) or (cmake_spec != _load_cmake_spec())):
                env = cmkr.configure(cmake_args, skip_generator_test=skip_generator_test, cmake_source_dir=cmake_source_dir, cmake_install_dir=skbuild_kw['cmake_install_dir'], languages=cmake_languages)
                _save_cmake_spec(cmake_spec)
            cmkr.make(make_args, install_target=cmake_install_target, env=env)
    except SKBuildGeneratorNotFoundError as ex:
        sys.exit(ex)
    except SKBuildError as ex:
        import traceback
        print('Traceback (most recent call last):')
        traceback.print_tb(sys.exc_info()[2])
        print('')
        sys.exit(ex)
    # Fill in default directories for packages without an explicit
    # package_dir entry, honoring a '' (root) entry when present.
    for package in packages:
        if (package not in package_dir):
            package_dir[package] = package.replace('.', '/')
            if ('' in package_dir):
                package_dir[package] = to_unix_path(os.path.join(package_dir[''], package_dir[package]))
    kw['package_dir'] = package_dir
    package_prefixes = _collect_package_prefixes(package_dir, packages)
    # Run the CMake install step, optionally post-process the manifest, and
    # classify every installed file as package data, module, script or
    # data file.
    cmake_manifest = cmkr.install()
    process_manifest = skbuild_kw.get('cmake_process_manifest_hook')
    if (process_manifest is not None):
        if callable(process_manifest):
            cmake_manifest = process_manifest(cmake_manifest)
        else:
            raise SKBuildError('The cmake_process_manifest_hook argument should be callable.')
    _classify_installed_files(cmake_manifest, package_data, package_prefixes, py_modules, new_py_modules, scripts, new_scripts, data_files, cmake_source_dir, skbuild_kw['cmake_install_dir'])
    original_manifestin_data_files = []
    if kw.get('include_package_data', False):
        # include_package_data=True: also classify the files declared in
        # MANIFEST.in.
        original_manifestin_data_files = parse_manifestin(os.path.join(os.getcwd(), 'MANIFEST.in'))
        for path in original_manifestin_data_files:
            _classify_file(path, package_data, package_prefixes, py_modules, new_py_modules, scripts, new_scripts, data_files)
    if developer_mode:
        # Copy CMake-installed artifacts back into the source tree so that
        # in-place development ('develop'/'test'/--inplace) works.
        for (package, package_file_list) in package_data.items():
            for package_file in package_file_list:
                package_file = os.path.join(package_dir[package], package_file)
                cmake_file = os.path.join(CMAKE_INSTALL_DIR(), package_file)
                if os.path.exists(cmake_file):
                    _copy_file(cmake_file, package_file, hide_listing)
        for py_module in py_modules:
            package_file = (py_module + '.py')
            cmake_file = os.path.join(CMAKE_INSTALL_DIR(), package_file)
            if os.path.exists(cmake_file):
                _copy_file(cmake_file, package_file, hide_listing)
    else:
        # Consolidate pure python modules, package data and MANIFEST.in
        # files into the CMake install directory, which then serves as the
        # single source tree for the setuptools commands.
        _consolidate_package_modules(cmake_source_dir, packages, package_dir, py_modules, package_data, hide_listing)
        original_package_data = kw.get('package_data', {}).copy()
        _consolidate_package_data_files(original_package_data, package_prefixes, hide_listing)
        for data_file in original_manifestin_data_files:
            dest_data_file = os.path.join(CMAKE_INSTALL_DIR(), data_file)
            _copy_file(data_file, dest_data_file, hide_listing)
    kw['package_data'] = package_data
    # Point package_dir/scripts at the CMake install tree where the
    # corresponding artifacts exist there; otherwise keep the originals.
    kw['package_dir'] = {package: (os.path.join(CMAKE_INSTALL_DIR(), prefix) if os.path.exists(os.path.join(CMAKE_INSTALL_DIR(), prefix)) else prefix) for (prefix, package) in package_prefixes}
    kw['scripts'] = [(os.path.join(CMAKE_INSTALL_DIR(), script) if mask else script) for (script, mask) in new_scripts.items()]
    kw['data_files'] = [(parent_dir, list(file_set)) for (parent_dir, file_set) in data_files.items()]
    if ('zip_safe' not in kw):
        kw['zip_safe'] = False
    # The distribution contains compiled artifacts whenever a CMakeLists.txt
    # exists, so force setuptools to treat it as a binary distribution.
    class BinaryDistribution(upstream_Distribution):
        def has_ext_modules(self):
            return has_cmakelists
    kw['distclass'] = BinaryDistribution
    print('')
    return setuptools.setup(*args, **kw)
| -6,127,948,168,491,292,000
|
This function wraps setup() so that we can run cmake, make,
CMake build, then proceed as usual with setuptools, appending the
CMake-generated output as necessary.
The CMake project is re-configured only if needed. This is achieved by (1) retrieving the environment mapping
associated with the generator set in the ``CMakeCache.txt`` file, (2) saving the CMake configure arguments and
version in :func:`skbuild.constants.CMAKE_SPEC_FILE()`: and (3) re-configuring only if either the generator or
the CMake specs change.
|
skbuild/setuptools_wrap.py
|
setup
|
pekkarr/scikit-build
|
python
|
def setup(*args, **kw):
'This function wraps setup() so that we can run cmake, make,\n CMake build, then proceed as usual with setuptools, appending the\n CMake-generated output as necessary.\n\n The CMake project is re-configured only if needed. This is achieved by (1) retrieving the environment mapping\n associated with the generator set in the ``CMakeCache.txt`` file, (2) saving the CMake configure arguments and\n version in :func:`skbuild.constants.CMAKE_SPEC_FILE()`: and (3) re-configuring only if either the generator or\n the CMake specs change.\n '
if ('package_dir' in kw):
for (package, prefix) in kw['package_dir'].items():
if prefix.endswith('/'):
msg = 'package_dir={{{!r}: {!r}}} ends with a trailing slash, which is not supported by setuptools.'.format(package, prefix)
warnings.warn(msg, FutureWarning, stacklevel=2)
kw['package_dir'][package] = prefix[:(- 1)]
(sys.argv, cmake_executable, skip_generator_test, cmake_args, make_args) = parse_args()
cmdclass = kw.get('cmdclass', {})
cmdclass['build'] = cmdclass.get('build', build.build)
cmdclass['build_py'] = cmdclass.get('build_py', build_py.build_py)
cmdclass['build_ext'] = cmdclass.get('build_ext', build_ext.build_ext)
cmdclass['install'] = cmdclass.get('install', install.install)
cmdclass['install_lib'] = cmdclass.get('install_lib', install_lib.install_lib)
cmdclass['install_scripts'] = cmdclass.get('install_scripts', install_scripts.install_scripts)
cmdclass['clean'] = cmdclass.get('clean', clean.clean)
cmdclass['sdist'] = cmdclass.get('sdist', sdist.sdist)
cmdclass['bdist'] = cmdclass.get('bdist', bdist.bdist)
cmdclass['bdist_wheel'] = cmdclass.get('bdist_wheel', bdist_wheel.bdist_wheel)
cmdclass['egg_info'] = cmdclass.get('egg_info', egg_info.egg_info)
cmdclass['generate_source_manifest'] = cmdclass.get('generate_source_manifest', generate_source_manifest.generate_source_manifest)
cmdclass['test'] = cmdclass.get('test', test.test)
kw['cmdclass'] = cmdclass
parameters = {'cmake_args': [], 'cmake_install_dir': , 'cmake_source_dir': , 'cmake_with_sdist': False, 'cmake_languages': ('C', 'CXX'), 'cmake_minimum_required_version': None, 'cmake_process_manifest_hook': None, 'cmake_install_target': 'install'}
skbuild_kw = {param: kw.pop(param, value) for (param, value) in parameters.items()}
try:
_check_skbuild_parameters(skbuild_kw)
except SKBuildError as ex:
import traceback
print('Traceback (most recent call last):')
traceback.print_tb(sys.exc_info()[2])
print()
sys.exit(ex)
cmake_source_dir = skbuild_kw['cmake_source_dir']
if (cmake_source_dir == '.'):
cmake_source_dir =
if os.path.isabs(cmake_source_dir):
cmake_source_dir = os.path.relpath(cmake_source_dir)
display_only = has_invalid_arguments = help_commands = False
force_cmake = skip_cmake = False
commands = []
try:
(display_only, help_commands, commands, hide_listing, force_cmake, skip_cmake, plat_name, build_ext_inplace) = _parse_setuptools_arguments(kw)
except (DistutilsArgError, DistutilsGetoptError):
has_invalid_arguments = True
has_cmakelists = os.path.exists(os.path.join(cmake_source_dir, 'CMakeLists.txt'))
if (not has_cmakelists):
print('skipping skbuild (no CMakeLists.txt found)')
skip_skbuild = (display_only or has_invalid_arguments or (not _should_run_cmake(commands, skbuild_kw['cmake_with_sdist'])) or (not has_cmakelists))
if (skip_skbuild and (not force_cmake)):
if help_commands:
skbuild_parser = create_skbuild_argparser()
arg_descriptions = [line for line in skbuild_parser.format_help().split('\n') if line.startswith(' ')]
print('scikit-build options:')
print('\n'.join(arg_descriptions))
print()
print('Arguments following a "--" are passed directly to CMake (e.g. -DMY_VAR:BOOL=TRUE).')
print('Arguments following a second "--" are passed directly to the build tool.')
print()
return setuptools.setup(*args, **kw)
developer_mode = (('develop' in commands) or ('test' in commands) or build_ext_inplace)
packages = kw.get('packages', [])
package_dir = kw.get('package_dir', {})
package_data = copy.deepcopy(kw.get('package_data', {}))
py_modules = kw.get('py_modules', [])
new_py_modules = {py_module: False for py_module in py_modules}
scripts = kw.get('scripts', [])
new_scripts = {script: False for script in scripts}
data_files = {(parent_dir or '.'): set(file_list) for (parent_dir, file_list) in kw.get('data_files', [])}
cmake_args = (skbuild_kw['cmake_args'] + cmake_args)
cmake_install_target_from_command = next((make_args[(index + 1)] for (index, item) in enumerate(make_args) if (item == '--install-target')), )
cmake_install_target_from_setup = skbuild_kw['cmake_install_target']
if cmake_install_target_from_command:
cmake_install_target = cmake_install_target_from_command
else:
cmake_install_target = cmake_install_target_from_setup
env_cmake_args = (os.environ['CMAKE_ARGS'].split() if ('CMAKE_ARGS' in os.environ) else [])
env_cmake_args = [s for s in env_cmake_args if ('CMAKE_INSTALL_PREFIX' not in s)]
cmake_args = (env_cmake_args + cmake_args)
if (sys.platform == 'darwin'):
if (plat_name is None):
plat_name = skbuild_plat_name()
(_, version, machine) = plat_name.split('-')
for cmake_arg in cmake_args:
if ('CMAKE_OSX_DEPLOYMENT_TARGET' in cmake_arg):
version = cmake_arg.split('=')[1]
if ('CMAKE_OSX_ARCHITECTURES' in cmake_arg):
machine = cmake_arg.split('=')[1]
if (set(machine.split(';')) == {'x86_64', 'arm64'}):
machine = 'universal2'
set_skbuild_plat_name('macosx-{}-{}'.format(version, machine))
os.environ.setdefault('_PYTHON_HOST_PLATFORM', skbuild_plat_name())
(_, version, machine) = skbuild_plat_name().split('-')
if (not cmaker.has_cmake_cache_arg(cmake_args, 'CMAKE_OSX_DEPLOYMENT_TARGET')):
cmake_args.append(('-DCMAKE_OSX_DEPLOYMENT_TARGET:STRING=%s' % version))
if (not cmaker.has_cmake_cache_arg(cmake_args, 'CMAKE_OSX_ARCHITECTURES')):
machine_archs = ('x86_64;arm64' if (machine == 'universal2') else machine)
cmake_args.append(('-DCMAKE_OSX_ARCHITECTURES:STRING=%s' % machine_archs))
for package in kw.get('setup_requires', []):
if (Requirement(package).name == 'cmake'):
setup_requires = [package]
dist = upstream_Distribution({'setup_requires': setup_requires})
dist.fetch_build_eggs(setup_requires)
import cmake
for executable in ['cmake', 'cpack', 'ctest']:
executable = os.path.join(cmake.CMAKE_BIN_DIR, executable)
if (platform.system().lower() == 'windows'):
executable += '.exe'
st = os.stat(executable)
permissions = (((st.st_mode | stat.S_IXUSR) | stat.S_IXGRP) | stat.S_IXOTH)
os.chmod(executable, permissions)
cmake_executable = os.path.join(cmake.CMAKE_BIN_DIR, 'cmake')
break
cmake_languages = skbuild_kw['cmake_languages']
try:
if (cmake_executable is None):
cmake_executable = CMAKE_DEFAULT_EXECUTABLE
cmkr = cmaker.CMaker(cmake_executable)
if (not skip_cmake):
cmake_minimum_required_version = skbuild_kw['cmake_minimum_required_version']
if (cmake_minimum_required_version is not None):
if (parse_version(cmkr.cmake_version) < parse_version(cmake_minimum_required_version)):
raise SKBuildError('CMake version {} or higher is required. CMake version {} is being used'.format(cmake_minimum_required_version, cmkr.cmake_version))
cmake_spec = {'args': ([which(CMAKE_DEFAULT_EXECUTABLE)] + cmake_args), 'version': cmkr.cmake_version, 'environment': {'PYTHONNOUSERSITE': os.environ.get('PYTHONNOUSERSITE'), 'PYTHONPATH': os.environ.get('PYTHONPATH')}}
env = cmkr.get_cached_generator_env()
if ((env is None) or (cmake_spec != _load_cmake_spec())):
env = cmkr.configure(cmake_args, skip_generator_test=skip_generator_test, cmake_source_dir=cmake_source_dir, cmake_install_dir=skbuild_kw['cmake_install_dir'], languages=cmake_languages)
_save_cmake_spec(cmake_spec)
cmkr.make(make_args, install_target=cmake_install_target, env=env)
except SKBuildGeneratorNotFoundError as ex:
sys.exit(ex)
except SKBuildError as ex:
import traceback
print('Traceback (most recent call last):')
traceback.print_tb(sys.exc_info()[2])
print()
sys.exit(ex)
for package in packages:
if (package not in package_dir):
package_dir[package] = package.replace('.', '/')
if ( in package_dir):
package_dir[package] = to_unix_path(os.path.join(package_dir[], package_dir[package]))
kw['package_dir'] = package_dir
package_prefixes = _collect_package_prefixes(package_dir, packages)
cmake_manifest = cmkr.install()
process_manifest = skbuild_kw.get('cmake_process_manifest_hook')
if (process_manifest is not None):
if callable(process_manifest):
cmake_manifest = process_manifest(cmake_manifest)
else:
raise SKBuildError('The cmake_process_manifest_hook argument should be callable.')
_classify_installed_files(cmake_manifest, package_data, package_prefixes, py_modules, new_py_modules, scripts, new_scripts, data_files, cmake_source_dir, skbuild_kw['cmake_install_dir'])
original_manifestin_data_files = []
if kw.get('include_package_data', False):
original_manifestin_data_files = parse_manifestin(os.path.join(os.getcwd(), 'MANIFEST.in'))
for path in original_manifestin_data_files:
_classify_file(path, package_data, package_prefixes, py_modules, new_py_modules, scripts, new_scripts, data_files)
if developer_mode:
for (package, package_file_list) in package_data.items():
for package_file in package_file_list:
package_file = os.path.join(package_dir[package], package_file)
cmake_file = os.path.join(CMAKE_INSTALL_DIR(), package_file)
if os.path.exists(cmake_file):
_copy_file(cmake_file, package_file, hide_listing)
for py_module in py_modules:
package_file = (py_module + '.py')
cmake_file = os.path.join(CMAKE_INSTALL_DIR(), package_file)
if os.path.exists(cmake_file):
_copy_file(cmake_file, package_file, hide_listing)
else:
_consolidate_package_modules(cmake_source_dir, packages, package_dir, py_modules, package_data, hide_listing)
original_package_data = kw.get('package_data', {}).copy()
_consolidate_package_data_files(original_package_data, package_prefixes, hide_listing)
for data_file in original_manifestin_data_files:
dest_data_file = os.path.join(CMAKE_INSTALL_DIR(), data_file)
_copy_file(data_file, dest_data_file, hide_listing)
kw['package_data'] = package_data
kw['package_dir'] = {package: (os.path.join(CMAKE_INSTALL_DIR(), prefix) if os.path.exists(os.path.join(CMAKE_INSTALL_DIR(), prefix)) else prefix) for (prefix, package) in package_prefixes}
kw['scripts'] = [(os.path.join(CMAKE_INSTALL_DIR(), script) if mask else script) for (script, mask) in new_scripts.items()]
kw['data_files'] = [(parent_dir, list(file_set)) for (parent_dir, file_set) in data_files.items()]
if ('zip_safe' not in kw):
kw['zip_safe'] = False
class BinaryDistribution(upstream_Distribution):
def has_ext_modules(self):
return has_cmakelists
kw['distclass'] = BinaryDistribution
print()
return setuptools.setup(*args, **kw)
|
def _collect_package_prefixes(package_dir, packages):
'\n Collect the list of prefixes for all packages\n\n The list is used to match paths in the install manifest to packages\n specified in the setup.py script.\n\n The list is sorted in decreasing order of prefix length so that paths are\n matched with their immediate parent package, instead of any of that\n package\'s ancestors.\n\n For example, consider the project structure below. Assume that the\n setup call was made with a package list featuring "top" and "top.bar", but\n not "top.not_a_subpackage".\n\n ::\n\n top/ -> top/\n __init__.py -> top/__init__.py (parent: top)\n foo.py -> top/foo.py (parent: top)\n bar/ -> top/bar/ (parent: top)\n __init__.py -> top/bar/__init__.py (parent: top.bar)\n\n not_a_subpackage/ -> top/not_a_subpackage/ (parent: top)\n data_0.txt -> top/not_a_subpackage/data_0.txt (parent: top)\n data_1.txt -> top/not_a_subpackage/data_1.txt (parent: top)\n\n The paths in the generated install manifest are matched to packages\n according to the parents indicated on the right. Only packages that are\n specified in the setup() call are considered. Because of the sort order,\n the data files on the bottom would have been mapped to\n "top.not_a_subpackage" instead of "top", proper -- had such a package been\n specified.\n '
return list(sorted(((package_dir[package].replace('.', '/'), package) for package in packages), key=(lambda tup: len(tup[0])), reverse=True))
| 1,475,335,275,993,887,200
|
Collect the list of prefixes for all packages
The list is used to match paths in the install manifest to packages
specified in the setup.py script.
The list is sorted in decreasing order of prefix length so that paths are
matched with their immediate parent package, instead of any of that
package's ancestors.
For example, consider the project structure below. Assume that the
setup call was made with a package list featuring "top" and "top.bar", but
not "top.not_a_subpackage".
::
top/ -> top/
__init__.py -> top/__init__.py (parent: top)
foo.py -> top/foo.py (parent: top)
bar/ -> top/bar/ (parent: top)
__init__.py -> top/bar/__init__.py (parent: top.bar)
not_a_subpackage/ -> top/not_a_subpackage/ (parent: top)
data_0.txt -> top/not_a_subpackage/data_0.txt (parent: top)
data_1.txt -> top/not_a_subpackage/data_1.txt (parent: top)
The paths in the generated install manifest are matched to packages
according to the parents indicated on the right. Only packages that are
specified in the setup() call are considered. Because of the sort order,
the data files on the bottom would have been mapped to
"top.not_a_subpackage" instead of "top", proper -- had such a package been
specified.
|
skbuild/setuptools_wrap.py
|
_collect_package_prefixes
|
pekkarr/scikit-build
|
python
|
def _collect_package_prefixes(package_dir, packages):
'\n Collect the list of prefixes for all packages\n\n The list is used to match paths in the install manifest to packages\n specified in the setup.py script.\n\n The list is sorted in decreasing order of prefix length so that paths are\n matched with their immediate parent package, instead of any of that\n package\'s ancestors.\n\n For example, consider the project structure below. Assume that the\n setup call was made with a package list featuring "top" and "top.bar", but\n not "top.not_a_subpackage".\n\n ::\n\n top/ -> top/\n __init__.py -> top/__init__.py (parent: top)\n foo.py -> top/foo.py (parent: top)\n bar/ -> top/bar/ (parent: top)\n __init__.py -> top/bar/__init__.py (parent: top.bar)\n\n not_a_subpackage/ -> top/not_a_subpackage/ (parent: top)\n data_0.txt -> top/not_a_subpackage/data_0.txt (parent: top)\n data_1.txt -> top/not_a_subpackage/data_1.txt (parent: top)\n\n The paths in the generated install manifest are matched to packages\n according to the parents indicated on the right. Only packages that are\n specified in the setup() call are considered. Because of the sort order,\n the data files on the bottom would have been mapped to\n "top.not_a_subpackage" instead of "top", proper -- had such a package been\n specified.\n '
return list(sorted(((package_dir[package].replace('.', '/'), package) for package in packages), key=(lambda tup: len(tup[0])), reverse=True))
|
def _copy_file(src_file, dest_file, hide_listing=True):
'Copy ``src_file`` to ``dest_file`` ensuring parent directory exists.\n\n By default, message like `creating directory /path/to/package` and\n `copying directory /src/path/to/package -> path/to/package` are displayed\n on standard output. Setting ``hide_listing`` to False avoids message from\n being displayed.\n '
dest_dir = os.path.dirname(dest_file)
if ((dest_dir != '') and (not os.path.exists(dest_dir))):
if (not hide_listing):
print('creating directory {}'.format(dest_dir))
mkdir_p(dest_dir)
if (not hide_listing):
print('copying {} -> {}'.format(src_file, dest_file))
copyfile(src_file, dest_file)
copymode(src_file, dest_file)
| 6,659,556,599,875,273,000
|
Copy ``src_file`` to ``dest_file`` ensuring parent directory exists.
By default, message like `creating directory /path/to/package` and
`copying directory /src/path/to/package -> path/to/package` are displayed
on standard output. Setting ``hide_listing`` to False avoids message from
being displayed.
|
skbuild/setuptools_wrap.py
|
_copy_file
|
pekkarr/scikit-build
|
python
|
def _copy_file(src_file, dest_file, hide_listing=True):
'Copy ``src_file`` to ``dest_file`` ensuring parent directory exists.\n\n By default, message like `creating directory /path/to/package` and\n `copying directory /src/path/to/package -> path/to/package` are displayed\n on standard output. Setting ``hide_listing`` to False avoids message from\n being displayed.\n '
dest_dir = os.path.dirname(dest_file)
if ((dest_dir != ) and (not os.path.exists(dest_dir))):
if (not hide_listing):
print('creating directory {}'.format(dest_dir))
mkdir_p(dest_dir)
if (not hide_listing):
print('copying {} -> {}'.format(src_file, dest_file))
copyfile(src_file, dest_file)
copymode(src_file, dest_file)
|
def _consolidate_package_modules(cmake_source_dir, packages, package_dir, py_modules, package_data, hide_listing):
    """Consolidate packages having modules in both the source tree and the
    CMake install tree into one location: the CMake install tree
    (see :func:`.constants.CMAKE_INSTALL_DIR()`).

    Setuptools tracks files to install as a mapping of package name to a flat
    list of module files, so files belonging to one package cannot be split
    across two locations.  Modules are therefore discovered in both trees with
    :class:`.utils.PythonModuleFinder`; any module not already covered by
    ``package_data`` (checked via :func:`_package_data_contain_module`) is
    copied into the CMake install tree and recorded in ``package_data`` so it
    is considered by the upstream ``setup()`` function.
    """
    try:
        modules = PythonModuleFinder(packages, package_dir, py_modules, alternative_build_base=CMAKE_INSTALL_DIR()).find_all_modules()
    except DistutilsError as msg:
        raise SystemExit('error: {}'.format(str(msg)))
    print('')
    for entry in modules:
        # Already part of the distribution; nothing to copy or record.
        if _package_data_contain_module(entry, package_data):
            continue
        (package, _, src_module_file) = entry
        if os.path.exists(src_module_file):
            # Mirror the source-tree module into the CMake install tree.
            dest_module_file = os.path.join(CMAKE_INSTALL_DIR(), src_module_file)
            _copy_file(src_module_file, dest_module_file, hide_listing)
        # package_data expects paths relative to the directory containing the
        # package, so strip the cmake_source_dir and package path components.
        package_parts = []
        if cmake_source_dir:
            package_parts = cmake_source_dir.split(os.path.sep)
        package_parts += package.split('.')
        stripped_module_file = strip_package(package_parts, src_module_file)
        # Record the module under its package (EAFP: create the list on first use).
        try:
            package_data[package].append(stripped_module_file)
        except KeyError:
            package_data[package] = [stripped_module_file]
| -8,092,281,073,517,067,000
|
This function consolidates packages having modules located in
both the source tree and the CMake install tree into one location.
The one location is the CMake install tree
(see :func:`.constants.CMAKE_INSTALL_DIR()`).
Why ? This is a necessary evil because ``Setuptools`` keeps track of
packages and modules files to install using a dictionary of lists where
the key are package names (e.g ``foo.bar``) and the values are lists of
module files (e.g ``['__init__.py', 'baz.py']``. Since this doesn't allow
to "split" files associated with a given module in multiple location, one
location is selected, and files are copied over.
How? It currently searches for modules across both locations using
the :class:`.utils.PythonModuleFinder`. then with the help
of :func:`_package_data_contain_module`, it identifies which
one are either already included or missing from the distribution.
Once a module has been identified as ``missing``, it is both copied
into the :func:`.constants.CMAKE_INSTALL_DIR()` and added to the
``package_data`` dictionary so that it can be considered by
the upstream setup function.
|
skbuild/setuptools_wrap.py
|
_consolidate_package_modules
|
pekkarr/scikit-build
|
python
|
def _consolidate_package_modules(cmake_source_dir, packages, package_dir, py_modules, package_data, hide_listing):
    """Consolidate packages having modules in both the source tree and the
    CMake install tree into the CMake install tree
    (see :func:`.constants.CMAKE_INSTALL_DIR()`).

    Modules found in either tree that are missing from ``package_data`` are
    copied into the CMake install tree and added to ``package_data`` so the
    upstream ``setup()`` function can pick them up.
    """
    try:
        modules = PythonModuleFinder(packages, package_dir, py_modules, alternative_build_base=CMAKE_INSTALL_DIR()).find_all_modules()
    except DistutilsError as msg:
        raise SystemExit('error: {}'.format(str(msg)))
    print()
    for entry in modules:
        # Skip modules already included in the distribution.
        if _package_data_contain_module(entry, package_data):
            continue
        (package, _, src_module_file) = entry
        if os.path.exists(src_module_file):
            dest_module_file = os.path.join(CMAKE_INSTALL_DIR(), src_module_file)
            _copy_file(src_module_file, dest_module_file, hide_listing)
        # Compute the path relative to the package directory for package_data.
        package_parts = []
        if cmake_source_dir:
            package_parts = cmake_source_dir.split(os.path.sep)
        package_parts += package.split('.')
        stripped_module_file = strip_package(package_parts, src_module_file)
        try:
            package_data[package].append(stripped_module_file)
        except KeyError:
            package_data[package] = [stripped_module_file]
|
def _consolidate_package_data_files(original_package_data, package_prefixes, hide_listing):
    """Copy package data files declared via the ``package_data`` keyword into
    :func:`.constants.CMAKE_INSTALL_DIR()`.

    ::

        setup(...,
            packages=['mypkg'],
            package_dir={'mypkg': 'src/mypkg'},
            package_data={'mypkg': ['data/*.dat']},
        )

    Because packages with modules in both the source tree and the CMake
    install tree are consolidated into the CMake install tree, and that tree
    is the package path setuptools later uses to package/install modules and
    data files, the data files must be copied there so setuptools can find
    them.
    """
    project_root = os.getcwd()
    for prefix, package in package_prefixes:
        patterns = original_package_data.get(package)
        if patterns is None:
            continue
        base = os.path.join(project_root, prefix)
        for pattern in patterns:
            # Expand the glob pattern relative to the package prefix.
            for src_data_file in glob(os.path.join(base, pattern)):
                # Path of the data file relative to the package prefix.
                rel_path = src_data_file[len(base) + 1:]
                dest_data_file = os.path.join(CMAKE_INSTALL_DIR(), prefix, rel_path)
                _copy_file(src_data_file, dest_data_file, hide_listing)
| 2,946,959,034,722,457,000
|
This function copies package data files specified using the ``package_data`` keyword
into :func:`.constants.CMAKE_INSTALL_DIR()`.
::
setup(...,
packages=['mypkg'],
package_dir={'mypkg': 'src/mypkg'},
package_data={'mypkg': ['data/*.dat']},
)
Considering that (1) the packages associated with modules located in both the source tree and
the CMake install tree are consolidated into the CMake install tree, and (2) the consolidated
package path set in the ``package_dir`` dictionary and later used by setuptools to package
(or install) modules and data files is :func:`.constants.CMAKE_INSTALL_DIR()`, copying the data files
is required to ensure setuptools can find them when it uses the package directory.
|
skbuild/setuptools_wrap.py
|
_consolidate_package_data_files
|
pekkarr/scikit-build
|
python
|
def _consolidate_package_data_files(original_package_data, package_prefixes, hide_listing):
    """Copy package data files declared via the ``package_data`` keyword into
    :func:`.constants.CMAKE_INSTALL_DIR()` so setuptools can find them under
    the consolidated package directory.
    """
    project_root = os.getcwd()
    for (prefix, package) in package_prefixes:
        # Only packages that actually declare data patterns are processed.
        if (package not in original_package_data):
            continue
        raw_patterns = original_package_data[package]
        for pattern in raw_patterns:
            # Expand the glob pattern relative to the package prefix.
            expanded_package_dir = os.path.join(project_root, prefix, pattern)
            for src_data_file in glob(expanded_package_dir):
                # Strip "<project_root>/<prefix>/" to get the relative path.
                full_prefix_length = (len(os.path.join(project_root, prefix)) + 1)
                data_file = src_data_file[full_prefix_length:]
                dest_data_file = os.path.join(CMAKE_INSTALL_DIR(), prefix, data_file)
                _copy_file(src_data_file, dest_data_file, hide_listing)
|
def _cmake_set(self, tline: CMakeTraceLine) -> None:
    """Handler for the CMake ``set()`` function in all its varieties.

    ``set()`` comes in three flavors::

        set(<var> <value> [PARENT_SCOPE])
        set(<var> <value> CACHE <type> <docstring> [FORCE])
        set(ENV{<var>} <value>)

    The ENV variant is not supported; uses of it are silently ignored.  The
    other two variants are supported with caveats:

    - scoping is not handled properly, so ``set()`` inside a function without
      PARENT_SCOPE may incorrectly shadow the outer scope
    - the <type> of CACHE arguments is recorded but not honored
    """
    # Detect the CACHE flavor: the token right after 'CACHE' is the entry type.
    cache_type = None
    cache_force = ('FORCE' in tline.args)
    try:
        cache_idx = tline.args.index('CACHE')
        cache_type = tline.args[(cache_idx + 1)]
    except (ValueError, IndexError):
        # ValueError: no CACHE keyword; IndexError: CACHE with no type after it.
        pass
    # Collect name + value tokens, dropping empty strings and PARENT_SCOPE and
    # stopping at CACHE (type/docstring/FORCE are not part of the value).
    args = []
    for i in tline.args:
        if ((not i) or (i == 'PARENT_SCOPE')):
            continue
        if (i == 'CACHE'):
            break
        args.append(i)
    if (len(args) < 1):
        return self._gen_exception('set', 'requires at least one argument', tline)
    identifier = args.pop(0)
    # Join the remaining tokens with spaces, then re-split on ';' so both the
    # 'a;b' and 'a b' list spellings yield the same list value.
    value = ' '.join(args)
    if cache_type:
        # Cache entries are only overwritten when FORCE was given.
        if ((identifier not in self.cache) or cache_force):
            self.cache[identifier] = CMakeCacheEntry(value.split(';'), cache_type)
    if (not value):
        # set(<var>) with no value unsets the variable.
        if (identifier in self.vars):
            del self.vars[identifier]
    else:
        self.vars[identifier] = value.split(';')
        self.vars_by_file.setdefault(tline.file, {})[identifier] = value.split(';')
| 355,447,399,708,182,140
|
Handler for the CMake set() function in all varieties.
comes in three flavors:
set(<var> <value> [PARENT_SCOPE])
set(<var> <value> CACHE <type> <docstring> [FORCE])
set(ENV{<var>} <value>)
We don't support the ENV variant, and any uses of it will be ignored
silently. The other two variants are supported, with some caveats:
- we don't properly handle scoping, so calls to set() inside a
function without PARENT_SCOPE set could incorrectly shadow the
outer scope.
- We don't honor the type of CACHE arguments
|
mesonbuild/cmake/traceparser.py
|
_cmake_set
|
Linux-Defender/meson
|
python
|
def _cmake_set(self, tline: CMakeTraceLine) -> None:
    """Handler for the CMake ``set()`` function.

    Supports ``set(<var> <value> [PARENT_SCOPE])`` and
    ``set(<var> <value> CACHE <type> <docstring> [FORCE])``; the
    ``set(ENV{<var>} <value>)`` variant is silently ignored.  Scoping is not
    handled properly, and the <type> of CACHE arguments is not honored.
    """
    # Token after 'CACHE' (if present) is the cache entry type.
    cache_type = None
    cache_force = ('FORCE' in tline.args)
    try:
        cache_idx = tline.args.index('CACHE')
        cache_type = tline.args[(cache_idx + 1)]
    except (ValueError, IndexError):
        pass
    # Keep only name/value tokens: drop empties and PARENT_SCOPE, stop at CACHE.
    args = []
    for i in tline.args:
        if ((not i) or (i == 'PARENT_SCOPE')):
            continue
        if (i == 'CACHE'):
            break
        args.append(i)
    if (len(args) < 1):
        return self._gen_exception('set', 'requires at least one argument', tline)
    identifier = args.pop(0)
    value = ' '.join(args)
    if cache_type:
        # Only overwrite an existing cache entry when FORCE was given.
        if ((identifier not in self.cache) or cache_force):
            self.cache[identifier] = CMakeCacheEntry(value.split(';'), cache_type)
    if (not value):
        # An empty value unsets the variable.
        if (identifier in self.vars):
            del self.vars[identifier]
    else:
        self.vars[identifier] = value.split(';')
        self.vars_by_file.setdefault(tline.file, {})[identifier] = value.split(';')
|
def cmd(tgt, fun, arg=(), timeout=None, expr_form='glob', kwarg=None):
    """Execute a single command via the salt-ssh subsystem and return all
    routines at once.

    .. versionadded:: 2015.2

    A wrapper around the :py:meth:`SSHClient.cmd
    <salt.client.ssh.client.SSHClient.cmd>` method.
    """
    # Build an SSH client from the runner's master options and delegate.
    client = salt.client.ssh.client.SSHClient(mopts=__opts__)
    return client.cmd(tgt, fun, arg, timeout, expr_form, kwarg)
| 6,070,789,426,128,428,000
|
Execute a single command via the salt-ssh subsystem and return all
routines at once
.. versionadded:: 2015.2
A wrapper around the :py:meth:`SSHClient.cmd
<salt.client.ssh.client.SSHClient.cmd>` method.
|
salt/runners/ssh.py
|
cmd
|
0xf10e/salt
|
python
|
def cmd(tgt, fun, arg=(), timeout=None, expr_form='glob', kwarg=None):
    """Execute a single command via the salt-ssh subsystem and return all
    routines at once.

    .. versionadded:: 2015.2

    A thin wrapper around :py:meth:`SSHClient.cmd
    <salt.client.ssh.client.SSHClient.cmd>`.
    """
    # Instantiate the SSH client with the runner's master options, then delegate.
    client = salt.client.ssh.client.SSHClient(mopts=__opts__)
    return client.cmd(tgt, fun, arg, timeout, expr_form, kwarg)
|
def test_salt_cp(master, minion, salt_cp, tempfiles):
    """Test copying a file from the master to the minion.

    Exercises both ``salt-cp`` calling conventions: the target passed as the
    first CLI argument, and the target passed via the ``minion_tgt`` keyword.
    The original body duplicated the same 18 lines twice; the shared logic is
    factored into a local helper.
    """
    contents = 'id: foo'

    def _run_and_check(expected_json, *cli_args, **run_kwargs):
        # Reserve a unique destination path that does not exist yet.
        tfile = tempfile.NamedTemporaryFile(delete=True)
        tfile.close()
        dest = tfile.name
        try:
            sls = tempfiles.makeslsfile(contents)
            assert master.is_alive()
            assert minion.is_alive()
            ret = salt_cp.run(*cli_args, sls, dest, **run_kwargs)
            assert (ret.exitcode == 0), ret
            assert (ret.json == expected_json(dest)), ret
            # The file must have been copied with the exact same contents.
            assert os.path.exists(dest)
            with open(dest) as rfh:
                assert (rfh.read() == contents)
        finally:
            if os.path.exists(dest):
                os.unlink(dest)

    # Target given as the first positional CLI argument.
    _run_and_check(lambda dest: {'minion-1': {dest: True}}, 'minion-1')
    # Target given through the ``minion_tgt`` keyword.
    _run_and_check(lambda dest: {dest: True}, minion_tgt='minion-1')
| 2,320,564,476,749,740,000
|
Test copying a file from the master to the minion
|
tests/integration/factories/master/test_master.py
|
test_salt_cp
|
cmcmarrow/pytest-salt-factories
|
python
|
def test_salt_cp(master, minion, salt_cp, tempfiles):
    """Test copying a file from the master to the minion, using both
    ``salt-cp`` calling conventions (explicit CLI target, then ``minion_tgt``).
    """
    # First convention: target passed as the first CLI argument.
    tfile = tempfile.NamedTemporaryFile(delete=True)
    tfile.close()
    dest = tfile.name
    try:
        contents = 'id: foo'
        sls = tempfiles.makeslsfile(contents)
        assert master.is_alive()
        assert minion.is_alive()
        ret = salt_cp.run('minion-1', sls, dest)
        assert (ret.exitcode == 0), ret
        assert (ret.json == {'minion-1': {dest: True}}), ret
        assert os.path.exists(dest)
        with open(dest) as rfh:
            assert (rfh.read() == contents)
    finally:
        if os.path.exists(dest):
            os.unlink(dest)
    # Second convention: target passed through the ``minion_tgt`` keyword.
    tfile = tempfile.NamedTemporaryFile(delete=True)
    tfile.close()
    dest = tfile.name
    try:
        contents = 'id: foo'
        sls = tempfiles.makeslsfile(contents)
        assert master.is_alive()
        assert minion.is_alive()
        ret = salt_cp.run(sls, dest, minion_tgt='minion-1')
        assert (ret.exitcode == 0), ret
        assert (ret.json == {dest: True}), ret
        assert os.path.exists(dest)
        with open(dest) as rfh:
            assert (rfh.read() == contents)
    finally:
        if os.path.exists(dest):
            os.unlink(dest)
|
def main():
    """Create HRTF-filtered sound samples of the sounds in SOUND_FILES.

    Data is generated for each participant's HRTF listed in
    ``participant_numbers``, sweeping over every combination of SNR,
    frequency-band count, azimuth and maximum frequency.  All 50 elevations
    are used to filter the data (handled inside ``create_data``).
    """
    logger = logging.getLogger(__name__)
    logger.info('making final data set from raw data')
    normalize = False       # do not normalize the filtered signals
    time_window = 0.1       # analysis time window (seconds)
    snrs = np.arange(0, 1.1, 0.1)    # signal-to-noise ratios to sweep
    freq_bandss = np.array([128])    # number of frequency bands
    azimuths = np.array([12])        # azimuth indices
    participant_numbers = np.array([1, 2, 3, 8, 9, 10, 11, 12, 15, 17, 18, 19, 20, 21, 27, 28, 33, 40, 44, 48, 50, 51, 58, 59, 60, 61, 65, 119, 124, 126, 127, 131, 133, 134, 135, 137, 147, 148, 152, 153, 154, 155, 156, 158, 162, 163, 165])
    max_freqs = np.array([16000, 20000])
    # The enumerate() indices were never used, so iterate over the values
    # directly; the returned PSDs were also unused, so the call is made for
    # its side effects (writing the generated data set).
    for participant_number in participant_numbers:
        for snr in snrs:
            for freq_bands in freq_bandss:
                for azimuth in azimuths:
                    for max_freq in max_freqs:
                        create_data(freq_bands, participant_number, snr, normalize, azimuth, time_window, max_freq=max_freq)
| -7,958,006,284,450,632,000
|
This script creates HRTF filtered sound samples of the sounds given in the folder SOUND_FILES.
This is done for each participant's HRTF specified in participant_numbers.
ALL ELEVATIONS (50) are taken to filter the data.
|
src/data/generateData.py
|
main
|
oesst/HRTF_neural_model
|
python
|
def main():
    """Create HRTF-filtered sound samples of the sounds in SOUND_FILES for
    each participant's HRTF listed in ``participant_numbers``, sweeping all
    combinations of SNR, frequency bands, azimuth and maximum frequency.
    """
    logger = logging.getLogger(__name__)
    logger.info('making final data set from raw data')
    normalize = False       # do not normalize the filtered signals
    time_window = 0.1       # analysis time window (seconds)
    snrs = np.arange(0, 1.1, 0.1)
    freq_bandss = np.array([128])
    azimuths = np.array([12])
    participant_numbers = np.array([1, 2, 3, 8, 9, 10, 11, 12, 15, 17, 18, 19, 20, 21, 27, 28, 33, 40, 44, 48, 50, 51, 58, 59, 60, 61, 65, 119, 124, 126, 127, 131, 133, 134, 135, 137, 147, 148, 152, 153, 154, 155, 156, 158, 162, 163, 165])
    max_freqs = np.array([16000, 20000])
    # Generate the data set for every parameter combination; the enumerate()
    # indices are deliberately discarded (only the values are used).
    for (_, participant_number) in enumerate(participant_numbers):
        for (_, snr) in enumerate(snrs):
            for (_, freq_bands) in enumerate(freq_bandss):
                for (_, azimuth) in enumerate(azimuths):
                    for (_, max_freq) in enumerate(max_freqs):
                        (psd_all_c, psd_all_i) = create_data(freq_bands, participant_number, snr, normalize, azimuth, time_window, max_freq=max_freq)
|
@distributed_trace
def begin_cancel(self, resource_group_name: str, vm_scale_set_name: str, **kwargs: Any) -> LROPoller[None]:
    """Cancels the current virtual machine scale set rolling upgrade.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param vm_scale_set_name: The name of the VM scale set.
    :type vm_scale_set_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response.
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
     operation to not poll, or pass in your own initialized polling object for a personal polling
     strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
     Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = kwargs.pop('api_version', '2021-07-01')
    polling = kwargs.pop('polling', True)
    cls = kwargs.pop('cls', None)
    lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
    cont_token = kwargs.pop('continuation_token', None)
    # Only issue the initial request when not resuming from a saved state.
    if (cont_token is None):
        raw_result = self._cancel_initial(resource_group_name=resource_group_name, vm_scale_set_name=vm_scale_set_name, api_version=api_version, cls=(lambda x, y, z: x), **kwargs)
        kwargs.pop('error_map', None)
    def get_long_running_output(pipeline_response):
        # The operation returns no body; only a custom `cls` yields a value.
        if cls:
            return cls(pipeline_response, None, {})
    if (polling is True):
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif (polling is False):
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume from the continuation token; no initial request was made.
        return LROPoller.from_continuation_token(polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output)
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
| 2,920,294,095,514,641,400
|
Cancels the current virtual machine scale set rolling upgrade.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2021_07_01/operations/_virtual_machine_scale_set_rolling_upgrades_operations.py
|
begin_cancel
|
AikoBB/azure-sdk-for-python
|
python
|
@distributed_trace
def begin_cancel(self, resource_group_name: str, vm_scale_set_name: str, **kwargs: Any) -> LROPoller[None]:
    """Cancels the current virtual machine scale set rolling upgrade.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param vm_scale_set_name: The name of the VM scale set.
    :type vm_scale_set_name: str
    :keyword callable cls: A custom type or function passed the direct response.
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: ARMPolling by default; False to not poll; or a custom polling object.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Waiting time between polls if no Retry-After header is present.
    :return: An LROPoller that returns either None or the result of cls(response).
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = kwargs.pop('api_version', '2021-07-01')
    polling = kwargs.pop('polling', True)
    cls = kwargs.pop('cls', None)
    lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
    cont_token = kwargs.pop('continuation_token', None)
    # Skip the initial request when resuming from a continuation token.
    if (cont_token is None):
        raw_result = self._cancel_initial(resource_group_name=resource_group_name, vm_scale_set_name=vm_scale_set_name, api_version=api_version, cls=(lambda x, y, z: x), **kwargs)
        kwargs.pop('error_map', None)
    def get_long_running_output(pipeline_response):
        # No response body; only a custom `cls` produces a value.
        if cls:
            return cls(pipeline_response, None, {})
    if (polling is True):
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif (polling is False):
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output)
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
|
@distributed_trace
def begin_start_os_upgrade(self, resource_group_name: str, vm_scale_set_name: str, **kwargs: Any) -> LROPoller[None]:
    """Start a rolling upgrade to move all scale set instances to the latest
    available Platform Image OS version.

    Instances already running the latest available OS version are not affected.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param vm_scale_set_name: The name of the VM scale set.
    :type vm_scale_set_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response.
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default ARMPolling; pass False to disable polling, or pass a
        pre-built polling object for a custom strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls when no
        Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response).
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = kwargs.pop('api_version', '2021-07-01')
    polling = kwargs.pop('polling', True)
    cls = kwargs.pop('cls', None)
    lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
    cont_token = kwargs.pop('continuation_token', None)
    if cont_token is None:
        # Kick off the operation; keep the raw pipeline response for the poller.
        raw_result = self._start_os_upgrade_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            api_version=api_version,
            cls=(lambda x, y, z: x),
            **kwargs,
        )
        kwargs.pop('error_map', None)

    def get_long_running_output(pipeline_response):
        # No response body to deserialize; defer to a caller-supplied cls only.
        if cls:
            return cls(pipeline_response, None, {})

    # Resolve the polling strategy: bool toggles the default, else use as-is.
    if isinstance(polling, bool):
        polling_method = ARMPolling(lro_delay, **kwargs) if polling else NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
| -6,496,827,081,151,570,000
|
Starts a rolling upgrade to move all virtual machine scale set instances to the latest
available Platform Image OS version. Instances which are already running the latest available
OS version are not affected.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2021_07_01/operations/_virtual_machine_scale_set_rolling_upgrades_operations.py
|
begin_start_os_upgrade
|
AikoBB/azure-sdk-for-python
|
python
|
@distributed_trace
def begin_start_os_upgrade(self, resource_group_name: str, vm_scale_set_name: str, **kwargs: Any) -> LROPoller[None]:
    """Start a rolling upgrade moving all scale set instances to the latest
    available Platform Image OS version; up-to-date instances are untouched.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param vm_scale_set_name: The name of the VM scale set.
    :type vm_scale_set_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response.
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True (default ARMPolling), False (no polling), or a polling object.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default delay between polls when no Retry-After header.
    :return: An instance of LROPoller that returns either None or the result of cls(response).
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = kwargs.pop('api_version', '2021-07-01')
    polling = kwargs.pop('polling', True)
    cls = kwargs.pop('cls', None)
    lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
    cont_token = kwargs.pop('continuation_token', None)
    if cont_token is None:
        initial_response = self._start_os_upgrade_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            api_version=api_version,
            cls=(lambda x, y, z: x),
            **kwargs,
        )
        kwargs.pop('error_map', None)

    def get_long_running_output(pipeline_response):
        # Result type is None; apply the optional custom deserializer only.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously saved long-running operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, initial_response, get_long_running_output, polling_method)
|
@distributed_trace
def begin_start_extension_upgrade(self, resource_group_name: str, vm_scale_set_name: str, **kwargs: Any) -> LROPoller[None]:
    """Start a rolling upgrade moving all extensions for all scale set
    instances to the latest available extension version.

    Instances already running the latest extension versions are not affected.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param vm_scale_set_name: The name of the VM scale set.
    :type vm_scale_set_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response.
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default ARMPolling; pass False to disable polling, or pass a
        pre-built polling object for a custom strategy.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default waiting time between two polls when no
        Retry-After header is present.
    :return: An instance of LROPoller that returns either None or the result of cls(response).
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = kwargs.pop('api_version', '2021-07-01')
    polling = kwargs.pop('polling', True)
    cls = kwargs.pop('cls', None)
    lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
    cont_token = kwargs.pop('continuation_token', None)
    if cont_token is None:
        # Issue the initial request; its raw pipeline response seeds the poller.
        raw_result = self._start_extension_upgrade_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            api_version=api_version,
            cls=(lambda x, y, z: x),
            **kwargs,
        )
        kwargs.pop('error_map', None)

    def get_long_running_output(pipeline_response):
        # Operation returns no body; only a caller-supplied cls produces output.
        if cls:
            return cls(pipeline_response, None, {})

    # `polling` is either a bool switch or an already-built polling method.
    if isinstance(polling, bool):
        polling_method = ARMPolling(lro_delay, **kwargs) if polling else NoPolling()
    else:
        polling_method = polling
    if cont_token:
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
| 5,628,296,231,556,651,000
|
Starts a rolling upgrade to move all extensions for all virtual machine scale set instances to
the latest available extension version. Instances which are already running the latest
extension versions are not affected.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2021_07_01/operations/_virtual_machine_scale_set_rolling_upgrades_operations.py
|
begin_start_extension_upgrade
|
AikoBB/azure-sdk-for-python
|
python
|
@distributed_trace
def begin_start_extension_upgrade(self, resource_group_name: str, vm_scale_set_name: str, **kwargs: Any) -> LROPoller[None]:
    """Start a rolling upgrade of all extensions on all scale set instances to
    the latest available extension version; up-to-date instances are untouched.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param vm_scale_set_name: The name of the VM scale set.
    :type vm_scale_set_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response.
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: True (default ARMPolling), False (no polling), or a polling object.
    :paramtype polling: bool or ~azure.core.polling.PollingMethod
    :keyword int polling_interval: Default delay between polls when no Retry-After header.
    :return: An instance of LROPoller that returns either None or the result of cls(response).
    :rtype: ~azure.core.polling.LROPoller[None]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    api_version = kwargs.pop('api_version', '2021-07-01')
    polling = kwargs.pop('polling', True)
    cls = kwargs.pop('cls', None)
    lro_delay = kwargs.pop('polling_interval', self._config.polling_interval)
    cont_token = kwargs.pop('continuation_token', None)
    if cont_token is None:
        initial_response = self._start_extension_upgrade_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            api_version=api_version,
            cls=(lambda x, y, z: x),
            **kwargs,
        )
        kwargs.pop('error_map', None)

    def get_long_running_output(pipeline_response):
        # Result type is None; apply the optional custom deserializer only.
        if cls:
            return cls(pipeline_response, None, {})

    if polling is True:
        polling_method = ARMPolling(lro_delay, **kwargs)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    if cont_token:
        # Resume a previously saved long-running operation.
        return LROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output,
        )
    return LROPoller(self._client, initial_response, get_long_running_output, polling_method)
|
@distributed_trace
def get_latest(self, resource_group_name: str, vm_scale_set_name: str, **kwargs: Any) -> '_models.RollingUpgradeStatusInfo':
    """Get the status of the latest virtual machine scale set rolling upgrade.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param vm_scale_set_name: The name of the VM scale set.
    :type vm_scale_set_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response.
    :return: RollingUpgradeStatusInfo, or the result of cls(response).
    :rtype: ~azure.mgmt.compute.v2021_07_01.models.RollingUpgradeStatusInfo
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)
    api_version = kwargs.pop('api_version', '2021-07-01')
    # Default ARM error mapping; the caller may extend/override via `error_map`.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
    }
    error_map.update(kwargs.pop('error_map', {}))
    request = build_get_latest_request(
        resource_group_name=resource_group_name,
        vm_scale_set_name=vm_scale_set_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get_latest.metadata['url'],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    # Only 200 carries a valid payload; anything else is an error.
    if response.status_code != 200:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    deserialized = self._deserialize('RollingUpgradeStatusInfo', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
| 2,527,987,070,942,094,000
|
Gets the status of the latest virtual machine scale set rolling upgrade.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RollingUpgradeStatusInfo, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2021_07_01.models.RollingUpgradeStatusInfo
:raises: ~azure.core.exceptions.HttpResponseError
|
sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2021_07_01/operations/_virtual_machine_scale_set_rolling_upgrades_operations.py
|
get_latest
|
AikoBB/azure-sdk-for-python
|
python
|
@distributed_trace
def get_latest(self, resource_group_name: str, vm_scale_set_name: str, **kwargs: Any) -> '_models.RollingUpgradeStatusInfo':
    """Fetch the status of the most recent rolling upgrade for a VM scale set.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param vm_scale_set_name: The name of the VM scale set.
    :type vm_scale_set_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response.
    :return: RollingUpgradeStatusInfo, or the result of cls(response).
    :rtype: ~azure.mgmt.compute.v2021_07_01.models.RollingUpgradeStatusInfo
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)
    # Base ARM error map, merged with any caller-supplied overrides.
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop('error_map', {}))
    api_version = kwargs.pop('api_version', '2021-07-01')
    http_request = build_get_latest_request(
        resource_group_name=resource_group_name,
        vm_scale_set_name=vm_scale_set_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.get_latest.metadata['url'],
    )
    http_request = _convert_request(http_request)
    http_request.url = self._client.format_url(http_request.url)
    pipeline_response = self._client._pipeline.run(http_request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in (200,):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    result = self._deserialize('RollingUpgradeStatusInfo', pipeline_response)
    # Hand the raw response to a caller-supplied deserializer when given.
    if cls:
        return cls(pipeline_response, result, {})
    return result
|
@undoc
def softspace(file, newvalue):
    """Copied from code.py, to remove the dependency.

    Swap *file*'s ``softspace`` attribute for *newvalue* and return the
    previous value (0 when the attribute is missing or cannot be set).
    """
    previous = getattr(file, 'softspace', 0)
    try:
        file.softspace = newvalue
    except (AttributeError, TypeError):
        # Read-only or slot-less object (e.g. a built-in stream): ignore.
        pass
    return previous
| -5,996,212,998,271,666,000
|
Copied from code.py, to remove the dependency
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
softspace
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
@undoc
def softspace(file, newvalue):
    """Copied from code.py, to remove the dependency.

    Set ``file.softspace`` to *newvalue*, returning the old value; both the
    read and the write are best-effort (0 is returned if unreadable).
    """
    old = 0
    try:
        old = file.softspace
    except AttributeError:
        # Attribute never set on this object; report the default 0.
        pass
    try:
        file.softspace = newvalue
    except (AttributeError, TypeError):
        # Object rejects the attribute (read-only / built-in); ignore.
        pass
    return old
|
def raise_error(self):
    """Re-raise the stored execution error, if any; no-op on success."""
    pre_exec_error = self.error_before_exec
    if pre_exec_error is not None:
        raise pre_exec_error
    exec_error = self.error_in_exec
    if exec_error is not None:
        raise exec_error
| 7,432,556,278,080,430,000
|
Reraises error if `success` is `False`, otherwise does nothing
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
raise_error
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def raise_error(self):
    """Re-raise an error recorded during execution; do nothing if none is set."""
    # The pre-execution error takes precedence over the in-execution one.
    for err in (self.error_before_exec, self.error_in_exec):
        if err is not None:
            raise err
|
@property
def input_splitter(self):
    """Deprecated alias for ``input_transformer_manager`` (pre-7.0 compat).

    For example, ipykernel currently uses
    `shell.input_splitter.check_complete`.
    """
    from warnings import warn
    warn(
        '`input_splitter` is deprecated since IPython 7.0, prefer `input_transformer_manager`.',
        DeprecationWarning,
        stacklevel=2,
    )
    return self.input_transformer_manager
| -7,653,960,977,976,236,000
|
Make this available for backward compatibility (pre-7.0 release) with existing code.
For example, ipykernel ipykernel currently uses
`shell.input_splitter.check_complete`
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
input_splitter
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
@property
def input_splitter(self):
    """Backward-compatibility (pre-7.0 release) accessor for existing code.

    For example, ipykernel currently uses
    `shell.input_splitter.check_complete`.
    """
    from warnings import warn
    message = '`input_splitter` is deprecated since IPython 7.0, prefer `input_transformer_manager`.'
    warn(message, DeprecationWarning, stacklevel=2)
    return self.input_transformer_manager
|
def get_ipython(self):
    """Return the currently running IPython instance (i.e. this shell)."""
    return self
| -5,054,242,317,332,809,000
|
Return the currently running IPython instance.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
get_ipython
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def get_ipython(self):
    """Return the currently running IPython instance."""
    # The shell object itself is the running instance.
    return self
|
def set_autoindent(self, value=None):
    """Set the autoindent flag; with no argument, toggle the current value."""
    self.autoindent = (not self.autoindent) if value is None else value
| -8,217,420,294,068,633,000
|
Set the autoindent flag.
If called with no arguments, it acts as a toggle.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
set_autoindent
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def set_autoindent(self, value=None):
    """Set the autoindent flag.

    If called with no arguments, it acts as a toggle.
    """
    if value is None:
        # No explicit value: flip the current setting.
        value = not self.autoindent
    self.autoindent = value
|
def init_environment(self):
    """Apply any changes needed to the user's environment (base: no-op)."""
    # Subclasses override this hook; the base implementation does nothing.
    pass
| 3,436,820,746,371,780,600
|
Any changes we need to make to the user's environment.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
init_environment
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def init_environment(self):
    """Hook for environment adjustments; intentionally empty in the base class."""
    return None
|
def init_logstart(self):
    """Initialize logging in case it was requested at the command line."""
    # Precedence: append-mode file, then plain logfile, then bare logstart.
    if self.logappend:
        self.magic('logstart %s append' % self.logappend)
    elif self.logfile:
        self.magic('logstart %s' % self.logfile)
    elif self.logstart:
        self.magic('logstart')
| -2,385,930,210,229,966,300
|
Initialize logging in case it was requested at the command line.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
init_logstart
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def init_logstart(self):
    """Start the logger if logging was requested via configuration."""
    # Build the magic command for the first matching log option, if any.
    if self.logappend:
        command = 'logstart %s append' % self.logappend
    elif self.logfile:
        command = 'logstart %s' % self.logfile
    elif self.logstart:
        command = 'logstart'
    else:
        return
    self.magic(command)
|
@staticmethod
def get_path_links(p: Path):
'Gets path links including all symlinks\n\n Examples\n --------\n In [1]: from IPython.core.interactiveshell import InteractiveShell\n\n In [2]: import sys, pathlib\n\n In [3]: paths = InteractiveShell.get_path_links(pathlib.Path(sys.executable))\n\n In [4]: len(paths) == len(set(paths))\n Out[4]: True\n\n In [5]: bool(paths)\n Out[5]: True\n '
paths = [p]
while p.is_symlink():
new_path = Path(os.readlink(p))
if (not new_path.is_absolute()):
new_path = (p.parent / new_path)
p = new_path
paths.append(p)
return paths
| -1,158,422,851,321,546,800
|
Gets path links including all symlinks
Examples
--------
In [1]: from IPython.core.interactiveshell import InteractiveShell
In [2]: import sys, pathlib
In [3]: paths = InteractiveShell.get_path_links(pathlib.Path(sys.executable))
In [4]: len(paths) == len(set(paths))
Out[4]: True
In [5]: bool(paths)
Out[5]: True
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
get_path_links
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
@staticmethod
def get_path_links(p: Path):
'Gets path links including all symlinks\n\n Examples\n --------\n In [1]: from IPython.core.interactiveshell import InteractiveShell\n\n In [2]: import sys, pathlib\n\n In [3]: paths = InteractiveShell.get_path_links(pathlib.Path(sys.executable))\n\n In [4]: len(paths) == len(set(paths))\n Out[4]: True\n\n In [5]: bool(paths)\n Out[5]: True\n '
paths = [p]
while p.is_symlink():
new_path = Path(os.readlink(p))
if (not new_path.is_absolute()):
new_path = (p.parent / new_path)
p = new_path
paths.append(p)
return paths
|
def init_virtualenv(self):
    """Add the current virtualenv to sys.path so the user can import modules from it.

    This isn't perfect: it doesn't use the Python interpreter with which the
    virtualenv was built, and it ignores the --no-site-packages option. A
    warning will appear suggesting the user installs IPython in the
    virtualenv, but for many cases, it probably works well enough.

    Adapted from code snippets online.

    http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv
    """
    # Not inside a virtualenv at all: nothing to do.
    if ('VIRTUAL_ENV' not in os.environ):
        return
    elif (os.environ['VIRTUAL_ENV'] == ''):
        warn("Virtual env path set to '', please check if this is intended.")
        return
    p = Path(sys.executable)
    p_venv = Path(os.environ['VIRTUAL_ENV'])
    # Follow the interpreter's symlink chain so the containment check below
    # also works when the venv's python is a (chain of) symlink(s).
    paths = self.get_path_links(p)
    # Cygwin-style /cygdrive/c/... paths are rewritten to c:/ form.
    if (p_venv.parts[1] == 'cygdrive'):
        drive_name = p_venv.parts[2]
        p_venv = ((drive_name + ':/') / Path(*p_venv.parts[3:]))
    # If the running executable already lives under the active venv
    # (venv == grandparent of python, e.g. venv/bin/python), we are done.
    if any(((p_venv == p.parents[1]) for p in paths)):
        return
    if (sys.platform == 'win32'):
        virtual_env = str(Path(os.environ['VIRTUAL_ENV'], 'Lib', 'site-packages'))
    else:
        # Template with {}.{} placeholders for the python major.minor version.
        virtual_env_path = Path(os.environ['VIRTUAL_ENV'], 'lib', 'python{}.{}', 'site-packages')
        p_ver = sys.version_info[:2]
        # Prefer a version embedded in the venv path (e.g. ".../py3.9-venv"),
        # but only when the resulting site-packages directory actually exists.
        re_m = re.search('\\bpy(?:thon)?([23])\\.(\\d+)\\b', os.environ['VIRTUAL_ENV'])
        if re_m:
            predicted_path = Path(str(virtual_env_path).format(*re_m.groups()))
            if predicted_path.exists():
                p_ver = re_m.groups()
        virtual_env = str(virtual_env_path).format(*p_ver)
    warn('Attempting to work in a virtualenv. If you encounter problems, please install IPython inside the virtualenv.')
    import site
    sys.path.insert(0, virtual_env)
    site.addsitedir(virtual_env)
| -1,645,497,003,472,402,400
|
Add the current virtualenv to sys.path so the user can import modules from it.
This isn't perfect: it doesn't use the Python interpreter with which the
virtualenv was built, and it ignores the --no-site-packages option. A
warning will appear suggesting the user installs IPython in the
virtualenv, but for many cases, it probably works well enough.
Adapted from code snippets online.
http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
init_virtualenv
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def init_virtualenv(self):
    """Add the current virtualenv to sys.path so the user can import modules from it.

    This isn't perfect: it doesn't use the Python interpreter with which the
    virtualenv was built, and it ignores the --no-site-packages option. A
    warning will appear suggesting the user installs IPython in the
    virtualenv, but for many cases, it probably works well enough.

    Adapted from code snippets online.

    http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv
    """
    if 'VIRTUAL_ENV' not in os.environ:
        # Not inside a virtualenv at all: nothing to do.
        return
    elif os.environ['VIRTUAL_ENV'] == '':
        # BUGFIX: the comparison literal and the quoted '' in the message had
        # been dropped, leaving a SyntaxError (`== )`). Restored both.
        warn("Virtual env path set to '', please check if this is intended.")
        return
    p = Path(sys.executable)
    p_venv = Path(os.environ['VIRTUAL_ENV'])
    # Follow the interpreter's symlink chain so the containment check below
    # also works when the venv's python is a (chain of) symlink(s).
    paths = self.get_path_links(p)
    # Cygwin-style /cygdrive/c/... paths are rewritten to c:/ form.
    if p_venv.parts[1] == 'cygdrive':
        drive_name = p_venv.parts[2]
        p_venv = (drive_name + ':/') / Path(*p_venv.parts[3:])
    # Executable already lives under the active venv (venv/bin/python): done.
    if any(p_venv == p.parents[1] for p in paths):
        return
    if sys.platform == 'win32':
        virtual_env = str(Path(os.environ['VIRTUAL_ENV'], 'Lib', 'site-packages'))
    else:
        virtual_env_path = Path(os.environ['VIRTUAL_ENV'], 'lib', 'python{}.{}', 'site-packages')
        p_ver = sys.version_info[:2]
        # Prefer a version embedded in the venv path (e.g. ".../py3.9-venv"),
        # but only when the resulting site-packages directory actually exists.
        re_m = re.search(r'\bpy(?:thon)?([23])\.(\d+)\b', os.environ['VIRTUAL_ENV'])
        if re_m:
            predicted_path = Path(str(virtual_env_path).format(*re_m.groups()))
            if predicted_path.exists():
                p_ver = re_m.groups()
        virtual_env = str(virtual_env_path).format(*p_ver)
    warn('Attempting to work in a virtualenv. If you encounter problems, please install IPython inside the virtualenv.')
    import site
    sys.path.insert(0, virtual_env)
    site.addsitedir(virtual_env)
|
def save_sys_module_state(self):
    """Save the state of hooks in the sys module.

    This has to be called after self.user_module is created.
    """
    # Snapshot the std streams and excepthook so they can be restored later.
    self._orig_sys_module_state = dict(
        stdin=sys.stdin,
        stdout=sys.stdout,
        stderr=sys.stderr,
        excepthook=sys.excepthook,
    )
    mod_name = self.user_module.__name__
    self._orig_sys_modules_main_name = mod_name
    self._orig_sys_modules_main_mod = sys.modules.get(mod_name)
| 8,436,972,721,409,837,000
|
Save the state of hooks in the sys module.
This has to be called after self.user_module is created.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
save_sys_module_state
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def save_sys_module_state(self):
    """Record sys stream/excepthook state and the user module's sys.modules entry.

    This has to be called after self.user_module is created.
    """
    self._orig_sys_module_state = {
        'stdin': sys.stdin,
        'stdout': sys.stdout,
        'stderr': sys.stderr,
        'excepthook': sys.excepthook,
    }
    name = self.user_module.__name__
    self._orig_sys_modules_main_name = name
    # May be None when the user module was never installed in sys.modules.
    self._orig_sys_modules_main_mod = sys.modules.get(name)
|
def restore_sys_module_state(self):
    """Restore the state of the sys module."""
    try:
        for attr_name, saved_value in self._orig_sys_module_state.items():
            setattr(sys, attr_name, saved_value)
    except AttributeError:
        # Nothing was saved (save_sys_module_state never ran); ignore.
        pass
    if self._orig_sys_modules_main_mod is not None:
        sys.modules[self._orig_sys_modules_main_name] = self._orig_sys_modules_main_mod
| -2,300,663,245,410,239,700
|
Restore the state of the sys module.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
restore_sys_module_state
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def restore_sys_module_state(self):
    """Put back saved sys stream/excepthook state and the __main__ binding."""
    try:
        saved = self._orig_sys_module_state
    except AttributeError:
        # No snapshot exists; skip the stream restoration entirely.
        pass
    else:
        for name, value in saved.items():
            setattr(sys, name, value)
    if self._orig_sys_modules_main_mod is not None:
        sys.modules[self._orig_sys_modules_main_name] = self._orig_sys_modules_main_mod
|
def set_hook(self, name, hook, priority=50, str_key=None, re_key=None):
    """set_hook(name,hook) -> sets an internal IPython hook.

    IPython exposes some of its internal API as user-modifiable hooks. By
    adding your function to one of these hooks, you can modify IPython's
    behavior to call at runtime your own routines.
    """
    # Bind the hook as a method of the shell so it receives `self` when called.
    f = types.MethodType(hook, self)
    # String- and regex-keyed hooks go through a StrDispatch table rather
    # than a plain attribute on self.hooks; each returns immediately.
    if (str_key is not None):
        sdp = self.strdispatchers.get(name, StrDispatch())
        sdp.add_s(str_key, f, priority)
        self.strdispatchers[name] = sdp
        return
    if (re_key is not None):
        sdp = self.strdispatchers.get(name, StrDispatch())
        sdp.add_re(re.compile(re_key), f, priority)
        self.strdispatchers[name] = sdp
        return
    dp = getattr(self.hooks, name, None)
    # Unknown hook names only warn; deprecated ones raise below.
    if (name not in IPython.core.hooks.__all__):
        print(("Warning! Hook '%s' is not one of %s" % (name, IPython.core.hooks.__all__)))
    if (name in IPython.core.hooks.deprecated):
        alternative = IPython.core.hooks.deprecated[name]
        raise ValueError('Hook {} has been deprecated since IPython 5.0. Use {} instead.'.format(name, alternative))
    if (not dp):
        dp = IPython.core.hooks.CommandChainDispatcher()
    try:
        dp.add(f, priority)
    except AttributeError:
        # `dp` was a bare function (legacy single-hook style): replace it.
        dp = f
    setattr(self.hooks, name, dp)
| -1,604,421,436,809,402,000
|
set_hook(name,hook) -> sets an internal IPython hook.
IPython exposes some of its internal API as user-modifiable hooks. By
adding your function to one of these hooks, you can modify IPython's
behavior to call at runtime your own routines.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
set_hook
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def set_hook(self, name, hook, priority=50, str_key=None, re_key=None):
    """set_hook(name,hook) -> sets an internal IPython hook.

    IPython exposes some of its internal API as user-modifiable hooks. By
    adding your function to one of these hooks, you can modify IPython's
    behavior to call at runtime your own routines.
    """
    # Wrap the user hook as a bound method of this shell instance.
    f = types.MethodType(hook, self)
    # Keyed registrations (exact string or regex) live in StrDispatch tables.
    if (str_key is not None):
        sdp = self.strdispatchers.get(name, StrDispatch())
        sdp.add_s(str_key, f, priority)
        self.strdispatchers[name] = sdp
        return
    if (re_key is not None):
        sdp = self.strdispatchers.get(name, StrDispatch())
        sdp.add_re(re.compile(re_key), f, priority)
        self.strdispatchers[name] = sdp
        return
    dp = getattr(self.hooks, name, None)
    if (name not in IPython.core.hooks.__all__):
        print(("Warning! Hook '%s' is not one of %s" % (name, IPython.core.hooks.__all__)))
    # Registering a deprecated hook is a hard error, with the replacement named.
    if (name in IPython.core.hooks.deprecated):
        alternative = IPython.core.hooks.deprecated[name]
        raise ValueError('Hook {} has been deprecated since IPython 5.0. Use {} instead.'.format(name, alternative))
    if (not dp):
        dp = IPython.core.hooks.CommandChainDispatcher()
    try:
        dp.add(f, priority)
    except AttributeError:
        # Legacy single-function hook without an `add` method: overwrite it.
        dp = f
    setattr(self.hooks, name, dp)
|
def register_post_execute(self, func):
    """DEPRECATED: Use ip.events.register('post_run_cell', func)

    Register a function for calling after code execution.
    """
    msg = (
        "ip.register_post_execute is deprecated since IPython 1.0, "
        "use ip.events.register('post_run_cell', func) instead."
    )
    raise ValueError(msg)
| -6,596,494,135,322,957,000
|
DEPRECATED: Use ip.events.register('post_run_cell', func)
Register a function for calling after code execution.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
register_post_execute
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def register_post_execute(self, func):
    """DEPRECATED: Use ip.events.register('post_run_cell', func)

    This registration API was removed in IPython 1.0; calling it always
    raises a :class:`ValueError` pointing at the events API instead.
    """
    message = "ip.register_post_execute is deprecated since IPython 1.0, use ip.events.register('post_run_cell', func) instead."
    raise ValueError(message)
|
def new_main_mod(self, filename, modname):
    """Return a fresh '__main__'-style module for running user code.

    Modules are cached per absolute *filename*: repeated requests for the
    same script reuse one module object (so references held elsewhere
    stay valid) but with its namespace wiped clean.

    Parameters
    ----------
    filename : str
        Path of the script that will run in the module; normalised to an
        absolute path and used as the cache key.
    modname : str
        Name to give the module, typically '__main__' or the script's
        basename without extension.

    Returns
    -------
    types.ModuleType
        The (possibly recycled) module, with ``__name__``, ``__file__``
        and a truthy ``__nonzero__`` set.
    """
    key = os.path.abspath(filename)
    cached = self._main_mod_cache.get(key)
    if cached is None:
        cached = types.ModuleType(modname, doc='Module created for script run in IPython')
        self._main_mod_cache[key] = cached
    else:
        # Same script run again: keep the module object, reset its namespace.
        cached.__dict__.clear()
    cached.__name__ = modname
    cached.__file__ = key
    cached.__nonzero__ = (lambda: True)
    return cached
| 1,946,551,123,751,409,700
|
Return a new 'main' module object for user code execution.
``filename`` should be the path of the script which will be run in the
module. Requests with the same filename will get the same module, with
its namespace cleared.
``modname`` should be the module name - normally either '__main__' or
the basename of the file without the extension.
When scripts are executed via %run, we must keep a reference to their
__main__ module around so that Python doesn't
clear it, rendering references to module globals useless.
This method keeps said reference in a private dict, keyed by the
absolute path of the script. This way, for multiple executions of the
same script we only keep one copy of the namespace (the last one),
thus preventing memory leaks from old references while allowing the
objects from the last execution to be accessible.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
new_main_mod
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def new_main_mod(self, filename, modname):
    """Return a fresh '__main__'-style module for running user code.

    Modules are cached per absolute *filename*: repeated requests for the
    same script reuse one module object (so references held elsewhere
    stay valid) but with its namespace wiped clean.

    Parameters
    ----------
    filename : str
        Path of the script that will run in the module; normalised to an
        absolute path and used as the cache key.
    modname : str
        Name to give the module, typically '__main__' or the script's
        basename without extension.

    Returns
    -------
    types.ModuleType
        The (possibly recycled) module, with ``__name__``, ``__file__``
        and a truthy ``__nonzero__`` set.
    """
    key = os.path.abspath(filename)
    cached = self._main_mod_cache.get(key)
    if cached is None:
        cached = types.ModuleType(modname, doc='Module created for script run in IPython')
        self._main_mod_cache[key] = cached
    else:
        # Same script run again: keep the module object, reset its namespace.
        cached.__dict__.clear()
    cached.__name__ = modname
    cached.__file__ = key
    cached.__nonzero__ = (lambda: True)
    return cached
|
def clear_main_mod_cache(self):
    """Drop every cached 'main' module created by ``new_main_mod``.

    Intended for utilities such as ``%reset`` that need to release all
    references to previously-run scripts' namespaces.
    """
    cache = self._main_mod_cache
    cache.clear()
| 4,561,598,257,768,678,000
|
Clear the cache of main modules.
Mainly for use by utilities like %reset.
Examples
--------
In [15]: import IPython
In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython')
In [17]: len(_ip._main_mod_cache) > 0
Out[17]: True
In [18]: _ip.clear_main_mod_cache()
In [19]: len(_ip._main_mod_cache) == 0
Out[19]: True
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
clear_main_mod_cache
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def clear_main_mod_cache(self):
    """Drop every cached 'main' module created by ``new_main_mod``.

    Intended for utilities such as ``%reset`` that need to release all
    references to previously-run scripts' namespaces.
    """
    cache = self._main_mod_cache
    cache.clear()
|
def debugger(self, force=False):
    """Drop into the pdb debugger on the last stored traceback.

    Parameters
    ----------
    force : bool, optional
        By default nothing happens unless the instance flag
        ``self.call_pdb`` is set; pass ``force=True`` to activate the
        debugger regardless of that flag.
    """
    wanted = force or self.call_pdb
    if not wanted:
        return
    if not hasattr(sys, 'last_traceback'):
        # No exception has been recorded yet -- nothing to inspect.
        error('No traceback has been produced, nothing to debug.')
        return
    self.InteractiveTB.debugger(force=True)
| 3,130,626,380,053,997,600
|
Call the pdb debugger.
Keywords:
- force(False): by default, this routine checks the instance call_pdb
flag and does not actually invoke the debugger if the flag is false.
The 'force' option forces the debugger to activate even if the flag
is false.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
debugger
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def debugger(self, force=False):
    """Drop into the pdb debugger on the last stored traceback.

    Parameters
    ----------
    force : bool, optional
        By default nothing happens unless the instance flag
        ``self.call_pdb`` is set; pass ``force=True`` to activate the
        debugger regardless of that flag.
    """
    wanted = force or self.call_pdb
    if not wanted:
        return
    if not hasattr(sys, 'last_traceback'):
        # No exception has been recorded yet -- nothing to inspect.
        error('No traceback has been produced, nothing to debug.')
        return
    self.InteractiveTB.debugger(force=True)
|
def prepare_user_module(self, user_module=None, user_ns=None):
    """Prepare the module and namespace in which user code will run.

    With neither argument given, a fresh '__main__' module is created and
    its ``__dict__`` becomes the namespace.  With only *user_module*, its
    ``__dict__`` is used.  With only *user_ns*, a dummy module wrapping
    that dict is created.  With both (the embedding case), *user_ns* is
    the local namespace and *user_module* supplies the globals.

    Parameters
    ----------
    user_module : module, optional
        Existing module to run user code in; created when ``None``.
    user_ns : dict, optional
        Namespace in which to run interactive commands.

    Returns
    -------
    tuple
        ``(user_module, user_ns)``, both initialised, with ``__builtin__``
        and ``__builtins__`` guaranteed present in the module globals.
    """
    if user_module is None:
        if user_ns is not None:
            # Wrap the caller-supplied dict in a dummy module object.
            user_ns.setdefault('__name__', '__main__')
            user_module = DummyMod()
            user_module.__dict__ = user_ns
        else:
            user_module = types.ModuleType('__main__', doc='Automatically created module for IPython interactive environment')
    globals_ns = user_module.__dict__
    globals_ns.setdefault('__builtin__', builtin_mod)
    globals_ns.setdefault('__builtins__', builtin_mod)
    return (user_module, user_ns if user_ns is not None else globals_ns)
| 4,863,675,843,523,326,000
|
Prepare the module and namespace in which user code will be run.
When IPython is started normally, both parameters are None: a new module
is created automatically, and its __dict__ used as the namespace.
If only user_module is provided, its __dict__ is used as the namespace.
If only user_ns is provided, a dummy module is created, and user_ns
becomes the global namespace. If both are provided (as they may be
when embedding), user_ns is the local namespace, and user_module
provides the global namespace.
Parameters
----------
user_module : module, optional
The current user module in which IPython is being run. If None,
a clean module will be created.
user_ns : dict, optional
A namespace in which to run interactive commands.
Returns
-------
A tuple of user_module and user_ns, each properly initialised.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
prepare_user_module
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def prepare_user_module(self, user_module=None, user_ns=None):
    """Prepare the module and namespace in which user code will run.

    With neither argument given, a fresh '__main__' module is created and
    its ``__dict__`` becomes the namespace.  With only *user_module*, its
    ``__dict__`` is used.  With only *user_ns*, a dummy module wrapping
    that dict is created.  With both (the embedding case), *user_ns* is
    the local namespace and *user_module* supplies the globals.

    Parameters
    ----------
    user_module : module, optional
        Existing module to run user code in; created when ``None``.
    user_ns : dict, optional
        Namespace in which to run interactive commands.

    Returns
    -------
    tuple
        ``(user_module, user_ns)``, both initialised, with ``__builtin__``
        and ``__builtins__`` guaranteed present in the module globals.
    """
    if user_module is None:
        if user_ns is not None:
            # Wrap the caller-supplied dict in a dummy module object.
            user_ns.setdefault('__name__', '__main__')
            user_module = DummyMod()
            user_module.__dict__ = user_ns
        else:
            user_module = types.ModuleType('__main__', doc='Automatically created module for IPython interactive environment')
    globals_ns = user_module.__dict__
    globals_ns.setdefault('__builtin__', builtin_mod)
    globals_ns.setdefault('__builtins__', builtin_mod)
    return (user_module, user_ns if user_ns is not None else globals_ns)
|
def init_user_ns(self):
    """Seed the user-visible namespaces with their minimum defaults.

    Installs the history aliases (``_ih``/``_oh``/``_dh`` and their
    ``In``/``Out`` counterparts), ``get_ipython`` and the ``exit``/``quit``
    exiter into ``self.user_ns``, recording them in ``self.user_ns_hidden``
    so they are excluded from user-variable listings.

    Notes
    -----
    Existing contents are never cleared; the defaults are merely merged in.
    """
    hist = self.history_manager
    defaults = {
        '_ih': hist.input_hist_parsed,
        '_oh': hist.output_hist,
        '_dh': hist.dir_hist,
        'In': hist.input_hist_parsed,
        'Out': hist.output_hist,
        'get_ipython': self.get_ipython,
        'exit': self.exiter,
        'quit': self.exiter,
    }
    self.user_ns_hidden.update(defaults)
    self.user_ns.update(defaults)
| -6,793,962,417,877,935,000
|
Initialize all user-visible namespaces to their minimum defaults.
Certain history lists are also initialized here, as they effectively
act as user namespaces.
Notes
-----
All data structures here are only filled in, they are NOT reset by this
method. If they were not empty before, data will simply be added to
them.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
init_user_ns
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def init_user_ns(self):
    """Seed the user-visible namespaces with their minimum defaults.

    Installs the history aliases (``_ih``/``_oh``/``_dh`` and their
    ``In``/``Out`` counterparts), ``get_ipython`` and the ``exit``/``quit``
    exiter into ``self.user_ns``, recording them in ``self.user_ns_hidden``
    so they are excluded from user-variable listings.

    Notes
    -----
    Existing contents are never cleared; the defaults are merely merged in.
    """
    hist = self.history_manager
    defaults = {
        '_ih': hist.input_hist_parsed,
        '_oh': hist.output_hist,
        '_dh': hist.dir_hist,
        'In': hist.input_hist_parsed,
        'Out': hist.output_hist,
        'get_ipython': self.get_ipython,
        'exit': self.exiter,
        'quit': self.exiter,
    }
    self.user_ns_hidden.update(defaults)
    self.user_ns.update(defaults)
|
@property
def all_ns_refs(self):
    """List of every namespace dict where IPython may hold user objects.

    Covers the user namespace, the global namespace, the hidden namespace,
    and the ``__dict__`` of each cached 'main' module.  The displayhook's
    output cache is deliberately not included.
    """
    cached_dicts = [mod.__dict__ for mod in self._main_mod_cache.values()]
    return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + cached_dicts
| -6,780,696,099,578,258,000
|
Get a list of references to all the namespace dictionaries in which
IPython might store a user-created object.
Note that this does not include the displayhook, which also caches
objects from the output.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
all_ns_refs
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
@property
def all_ns_refs(self):
    """List of every namespace dict where IPython may hold user objects.

    Covers the user namespace, the global namespace, the hidden namespace,
    and the ``__dict__`` of each cached 'main' module.  The displayhook's
    output cache is deliberately not included.
    """
    cached_dicts = [mod.__dict__ for mod in self._main_mod_cache.values()]
    return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + cached_dicts
|
def reset(self, new_session=True, aggressive=False):
    """Clear all internal namespaces, and attempt to release references to
    user objects.

    Parameters
    ----------
    new_session : bool, optional
        If True, a new history session is opened and the execution
        counter restarts at 1.
    aggressive : bool, optional
        If True, additionally prune ``sys.modules`` back to the snapshot
        stored in ``self._sys_modules_keys`` (when one was taken).
    """
    # Reset history first so a fresh session records the cleared state.
    self.history_manager.reset(new_session)
    if new_session:
        self.execution_count = 1
        self.last_execution_succeeded = True
        self.last_execution_result = None
    # Flush cached display outputs (the _N output variables).
    if self.displayhook.do_full_cache:
        self.displayhook.flush()
    # When embedding, locals and globals differ; wipe locals wholesale.
    if (self.user_ns is not self.user_global_ns):
        self.user_ns.clear()
    ns = self.user_global_ns
    drop_keys = set(ns.keys())
    # Keep the keys needed for the namespace to stay a working module ns.
    drop_keys.discard('__builtin__')
    drop_keys.discard('__builtins__')
    drop_keys.discard('__name__')
    for k in drop_keys:
        del ns[k]
    self.user_ns_hidden.clear()
    # Re-seed the minimum defaults (history aliases, exit/quit, ...).
    self.init_user_ns()
    if (aggressive and (not hasattr(self, '_sys_modules_keys'))):
        print('Cannot restore sys.module, no snapshot')
    elif aggressive:
        print('culling sys module...')
        current_keys = set(sys.modules.keys())
        for k in (current_keys - self._sys_modules_keys):
            # multiprocessing machinery must survive the cull.
            if k.startswith('multiprocessing'):
                continue
            del sys.modules[k]
    # Rebuild aliases, which the namespace wipe above destroyed.
    self.alias_manager.clear_aliases()
    self.alias_manager.init_aliases()
    if (os.name == 'posix'):
        # Restore pager-style aliases unless a line magic shadows them.
        for cmd in ('clear', 'more', 'less', 'man'):
            if (cmd not in self.magics_manager.magics['line']):
                self.alias_manager.soft_define_alias(cmd, cmd)
    self.clear_main_mod_cache()
| -6,565,744,993,413,206,000
|
Clear all internal namespaces, and attempt to release references to
user objects.
If new_session is True, a new history session will be opened.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
reset
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def reset(self, new_session=True, aggressive=False):
    """Clear all internal namespaces, and attempt to release references to
    user objects.

    Parameters
    ----------
    new_session : bool, optional
        If True, a new history session is opened and the execution
        counter restarts at 1.
    aggressive : bool, optional
        If True, additionally prune ``sys.modules`` back to the snapshot
        stored in ``self._sys_modules_keys`` (when one was taken).
    """
    # Reset history first so a fresh session records the cleared state.
    self.history_manager.reset(new_session)
    if new_session:
        self.execution_count = 1
        self.last_execution_succeeded = True
        self.last_execution_result = None
    # Flush cached display outputs (the _N output variables).
    if self.displayhook.do_full_cache:
        self.displayhook.flush()
    # When embedding, locals and globals differ; wipe locals wholesale.
    if (self.user_ns is not self.user_global_ns):
        self.user_ns.clear()
    ns = self.user_global_ns
    drop_keys = set(ns.keys())
    # Keep the keys needed for the namespace to stay a working module ns.
    drop_keys.discard('__builtin__')
    drop_keys.discard('__builtins__')
    drop_keys.discard('__name__')
    for k in drop_keys:
        del ns[k]
    self.user_ns_hidden.clear()
    # Re-seed the minimum defaults (history aliases, exit/quit, ...).
    self.init_user_ns()
    if (aggressive and (not hasattr(self, '_sys_modules_keys'))):
        print('Cannot restore sys.module, no snapshot')
    elif aggressive:
        print('culling sys module...')
        current_keys = set(sys.modules.keys())
        for k in (current_keys - self._sys_modules_keys):
            # multiprocessing machinery must survive the cull.
            if k.startswith('multiprocessing'):
                continue
            del sys.modules[k]
    # Rebuild aliases, which the namespace wipe above destroyed.
    self.alias_manager.clear_aliases()
    self.alias_manager.init_aliases()
    if (os.name == 'posix'):
        # Restore pager-style aliases unless a line magic shadows them.
        for cmd in ('clear', 'more', 'less', 'man'):
            if (cmd not in self.magics_manager.magics['line']):
                self.alias_manager.soft_define_alias(cmd, cmd)
    self.clear_main_mod_cache()
|
def del_var(self, varname, by_name=False):
    """Remove a variable from the namespaces, dropping hidden references.

    Parameters
    ----------
    varname : str
        Name of the variable to delete.
    by_name : bool, optional
        When True, delete any binding with this *name* from every
        namespace.  When False (default), look the object up in the user
        namespace and delete every reference *to that object* instead,
        including cached display results.

    Raises
    ------
    ValueError
        If asked to delete ``__builtin__`` or ``__builtins__``.
    NameError
        If *varname* is not defined in the user namespace (object mode).
    """
    if varname in ('__builtin__', '__builtins__'):
        raise ValueError(('Refusing to delete %s' % varname))
    ns_refs = self.all_ns_refs
    if by_name:
        # Name-based: drop the key wherever it exists; absent keys are fine.
        for ns in ns_refs:
            ns.pop(varname, None)
    else:
        try:
            obj = self.user_ns[varname]
        except KeyError as e:
            raise NameError(("name '%s' is not defined" % varname)) from e
        # Also scrub the output history, which caches displayed results.
        ns_refs.append(self.history_manager.output_hist)
        for ns in ns_refs:
            doomed = [key for (key, value) in ns.items() if (value is obj)]
            for key in doomed:
                del ns[key]
        if (self.last_execution_result.result is obj):
            self.last_execution_result = None
        # Clear the displayhook's _, __, ___ caches when they hold the object.
        for name in ('_', '__', '___'):
            if (getattr(self.displayhook, name) is obj):
                setattr(self.displayhook, name, None)
| -5,895,738,298,108,169,000
|
Delete a variable from the various namespaces, so that, as
far as possible, we're not keeping any hidden references to it.
Parameters
----------
varname : str
The name of the variable to delete.
by_name : bool
If True, delete variables with the given name in each
namespace. If False (default), find the variable in the user
namespace, and delete references to it.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
del_var
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def del_var(self, varname, by_name=False):
    """Remove a variable from the namespaces, dropping hidden references.

    Parameters
    ----------
    varname : str
        Name of the variable to delete.
    by_name : bool, optional
        When True, delete any binding with this *name* from every
        namespace.  When False (default), look the object up in the user
        namespace and delete every reference *to that object* instead,
        including cached display results.

    Raises
    ------
    ValueError
        If asked to delete ``__builtin__`` or ``__builtins__``.
    NameError
        If *varname* is not defined in the user namespace (object mode).
    """
    if varname in ('__builtin__', '__builtins__'):
        raise ValueError(('Refusing to delete %s' % varname))
    ns_refs = self.all_ns_refs
    if by_name:
        # Name-based: drop the key wherever it exists; absent keys are fine.
        for ns in ns_refs:
            ns.pop(varname, None)
    else:
        try:
            obj = self.user_ns[varname]
        except KeyError as e:
            raise NameError(("name '%s' is not defined" % varname)) from e
        # Also scrub the output history, which caches displayed results.
        ns_refs.append(self.history_manager.output_hist)
        for ns in ns_refs:
            doomed = [key for (key, value) in ns.items() if (value is obj)]
            for key in doomed:
                del ns[key]
        if (self.last_execution_result.result is obj):
            self.last_execution_result = None
        # Clear the displayhook's _, __, ___ caches when they hold the object.
        for name in ('_', '__', '___'):
            if (getattr(self.displayhook, name) is obj):
                setattr(self.displayhook, name, None)
|
def reset_selective(self, regex=None):
    """Delete variables whose names match *regex* from all namespaces.

    Parameters
    ----------
    regex : str or compiled pattern, optional
        Pattern searched against each variable name; matching entries are
        removed from every namespace in ``self.all_ns_refs``.  When
        ``None`` (the default) nothing is deleted.

    Raises
    ------
    TypeError
        If *regex* is neither a string nor a compiled pattern.
    """
    if (regex is not None):
        try:
            m = re.compile(regex)
        except TypeError as e:
            raise TypeError('regex must be a string or compiled pattern') from e
        for ns in self.all_ns_refs:
            # Iterate over a snapshot of the keys: deleting from a dict
            # while iterating it directly raises RuntimeError
            # ("dictionary changed size during iteration").
            for var in list(ns):
                if m.search(var):
                    del ns[var]
| 3,842,207,063,457,542,000
|
Clear selective variables from internal namespaces based on a
specified regular expression.
Parameters
----------
regex : string or compiled pattern, optional
A regular expression pattern that will be used in searching
variable names in the users namespaces.
|
venv/lib/python3.9/site-packages/IPython/core/interactiveshell.py
|
reset_selective
|
CMU-IDS-2022/final-project-the-evaluators
|
python
|
def reset_selective(self, regex=None):
    """Delete variables whose names match *regex* from all namespaces.

    Parameters
    ----------
    regex : str or compiled pattern, optional
        Pattern searched against each variable name; matching entries are
        removed from every namespace in ``self.all_ns_refs``.  When
        ``None`` (the default) nothing is deleted.

    Raises
    ------
    TypeError
        If *regex* is neither a string nor a compiled pattern.
    """
    if (regex is not None):
        try:
            m = re.compile(regex)
        except TypeError as e:
            raise TypeError('regex must be a string or compiled pattern') from e
        for ns in self.all_ns_refs:
            # Iterate over a snapshot of the keys: deleting from a dict
            # while iterating it directly raises RuntimeError
            # ("dictionary changed size during iteration").
            for var in list(ns):
                if m.search(var):
                    del ns[var]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.