idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
21,100
def is_primary(self):
    """True if the most recent valid self-signature marks this User ID as primary; otherwise False."""
    # h_PrimaryUserID is empty unless the self-signature carries the subpacket.
    flags = self.selfsig._signature.subpackets['h_PrimaryUserID']
    return bool(next(iter(flags), False))
If the most recent valid self-signature specifies this as being primary, this will be True. Otherwise False.
21,101
def selfsig(self):
    """Most recent self-signature of this User ID or Attribute, or None if there isn't one."""
    if self.parent is None:
        return None
    own_keyid = self.parent.fingerprint.keyid
    # signatures are stored oldest-first; scan backwards for the newest one by our own key
    return next((sig for sig in reversed(self._signatures) if sig.signer == own_keyid), None)
This will be the most recent self-signature of this User ID or Attribute. If there isn't one, this will be None.
21,102
def new(cls, pn, comment="", email=""):
    """Create a new User ID or photo.

    A ``bytearray`` ``pn`` is treated as image data (User Attribute);
    anything else becomes the name of a textual User ID.
    """
    uid = PGPUID()
    if isinstance(pn, bytearray):
        uid._uid = UserAttribute()
        uid._uid.image.image = pn
        uid._uid.image.iencoding = ImageEncoding.encodingof(pn)
    else:
        uid._uid = UserID()
        uid._uid.name = pn
        uid._uid.comment = comment
        uid._uid.email = email
    uid._uid.update_hlen()
    return uid
Create a new User ID or photo .
21,103
def message(self):
    """The message contents."""
    kind = self.type
    if kind == 'cleartext':
        return self.bytes_to_text(self._message)
    if kind == 'literal':
        return self._message.contents
    if kind == 'encrypted':
        return self._message
    # any other type falls through to None, matching the original contract
The message contents
21,104
def new(cls, message, **kwargs):
    """Create a new PGPMessage object.

    Keyword options: cleartext, format, sensitive, compression, file, encoding.
    """
    cleartext = kwargs.pop('cleartext', False)
    fmt = kwargs.pop('format', None)
    sensitive = kwargs.pop('sensitive', False)
    compression = kwargs.pop('compression', CompressionAlgorithm.ZIP)
    from_file = kwargs.pop('file', False)
    charset = kwargs.pop('encoding', None)

    filename = ''
    mtime = datetime.utcnow()
    msg = PGPMessage()
    if charset:
        msg.charset = charset

    # if message is a filename and file loading was requested, slurp the file contents
    if from_file and os.path.isfile(message):
        filename = message
        message = bytearray(os.path.getsize(filename))
        mtime = datetime.utcfromtimestamp(os.path.getmtime(filename))
        with open(filename, 'rb') as mf:
            mf.readinto(message)

    # infer the literal-data format when not given explicitly
    if fmt is None:
        if isinstance(message, six.text_type):
            fmt = 'u'
        elif cls.is_ascii(message):
            fmt = 't'
        else:
            fmt = 'b'

    # textual formats are handled as str internally
    if isinstance(message, (six.binary_type, bytearray)) and (cleartext or fmt in 'tu'):
        message = message.decode(charset or 'utf-8')

    if cleartext:
        msg |= message
    else:
        lit = LiteralData()
        lit._contents = bytearray(msg.text_to_bytes(message))
        lit.filename = '_CONSOLE' if sensitive else os.path.basename(filename)
        lit.mtime = mtime
        lit.format = fmt
        lit.update_hlen()
        msg |= lit
        msg._compression = compression

    return msg
Create a new PGPMessage object .
21,105
def decrypt(self, passphrase):
    """Attempt to decrypt this message using a passphrase."""
    if not self.is_encrypted:
        raise PGPError("This message is not encrypted!")

    # try each symmetric-key session key packet until one decrypts cleanly
    for skesk in (sk for sk in self._sessionkeys if isinstance(sk, SKESessionKey)):
        try:
            symalg, key = skesk.decrypt_sk(passphrase)
            decmsg = PGPMessage()
            decmsg.parse(self.message.decrypt(key, symalg))
        except (TypeError, ValueError, NotImplementedError, PGPDecryptionError):
            continue
        else:
            del passphrase  # drop the sensitive local as soon as it is no longer needed
            break
    else:
        raise PGPDecryptionError("Decryption failed")

    return decmsg
Attempt to decrypt this message using a passphrase .
21,106
def is_expired(self):
    """True if this key is expired, otherwise False."""
    expires = self.expires_at
    if expires is None:
        return False
    return expires <= datetime.utcnow()
True if this key is expired otherwise False
21,107
def is_primary(self):
    """True if this is a primary key; False if this is a subkey."""
    # a subkey packet may still subclass Primary, so both checks are needed
    if isinstance(self._key, Sub):
        return False
    return isinstance(self._key, Primary)
True if this is a primary key ; False if this is a subkey
21,108
def is_public(self):
    """True if this is a public key, otherwise False."""
    # private key packets subclass Public, so both checks are needed
    if isinstance(self._key, Private):
        return False
    return isinstance(self._key, Public)
True if this is a public key otherwise False
21,109
def is_unlocked(self):
    """False only for a passphrase-protected private key that has not yet been unlocked."""
    if self.is_public or not self.is_protected:
        return True
    return self._key.unlocked
False if this is a private key that is protected with a passphrase and has not yet been unlocked otherwise True
21,110
def new(cls, key_algorithm, key_size):
    """Generate a new PGP key."""
    key = PGPKey()
    # the sign-only / encrypt-only RSA variants are deprecated; upgrade transparently
    if key_algorithm in {PubKeyAlgorithm.RSAEncrypt, PubKeyAlgorithm.RSASign}:
        warnings.warn('{:s} is deprecated - generating key using RSAEncryptOrSign'.format(key_algorithm.name))
        key_algorithm = PubKeyAlgorithm.RSAEncryptOrSign
    key._key = PrivKeyV4.new(key_algorithm, key_size)
    return key
Generate a new PGP key
21,111
def protect(self, passphrase, enc_alg, hash_alg):
    """Add a passphrase to a private key.

    If the key is already passphrase-protected it should be unlocked
    before a new passphrase can be specified.
    """
    if self.is_public:
        warnings.warn("Public keys cannot be passphrase-protected", stacklevel=2)
        return
    if self.is_protected and not self.is_unlocked:
        warnings.warn("This key is already protected with a passphrase - "
                      "please unlock it before attempting to specify a new passphrase",
                      stacklevel=2)
        return

    # protect the primary key and every subkey with the same passphrase
    for sk in itertools.chain([self], self.subkeys.values()):
        sk._key.protect(passphrase, enc_alg, hash_alg)
    del passphrase  # drop the sensitive local
Add a passphrase to a private key . If the key is already passphrase protected it should be unlocked before a new passphrase can be specified .
21,112
def unlock(self, passphrase):
    """Context manager method for unlocking passphrase-protected private keys.

    Has no effect if the key is not both private and passphrase-protected.
    """
    if self.is_public:
        warnings.warn("Public keys cannot be passphrase-protected", stacklevel=3)
        yield self
        return
    if not self.is_protected:
        warnings.warn("This key is not protected with a passphrase", stacklevel=3)
        yield self
        return

    try:
        for sk in itertools.chain([self], self.subkeys.values()):
            sk._key.unprotect(passphrase)
        del passphrase  # drop the sensitive local before yielding control
        yield self
    finally:
        # always re-lock: wipe the decrypted key material on exit
        for sk in itertools.chain([self], self.subkeys.values()):
            sk._key.keymaterial.clear()
Context manager method for unlocking passphrase - protected private keys . Has no effect if the key is not both private and passphrase - protected .
21,113
def add_uid(self, uid, selfsign=True, **prefs):
    """Add a User ID to this key, self-certifying it by default."""
    uid._parent = self
    if selfsign:
        uid |= self.certify(uid, SignatureType.Positive_Cert, **prefs)
    self |= uid
Add a User ID to this key .
21,114
def get_uid(self, search):
    """Find and return a User ID that matches the search string, or None."""
    if not self.is_primary:
        # subkeys delegate to their primary key
        return self.parent.get_uid(search)
    for u in self._uids:
        # match against whichever of name/comment/email are set
        if search in filter(lambda a: a is not None, (u.name, u.comment, u.email)):
            return u
    return None
Find and return a User ID that matches the search string given .
21,115
def sign(self, subject, **prefs):
    """Sign text, a message, or a timestamp using this key."""
    sig_type = SignatureType.BinaryDocument
    hash_algo = prefs.pop('hash', None)

    if subject is None:
        # no subject at all means a bare timestamp signature
        sig_type = SignatureType.Timestamp
    if isinstance(subject, PGPMessage):
        if subject.type == 'cleartext':
            sig_type = SignatureType.CanonicalDocument
        subject = subject.message

    sig = PGPSignature.new(sig_type, self.key_algorithm, hash_algo, self.fingerprint.keyid)
    return self._sign(subject, sig, **prefs)
Sign text, a message, or a timestamp using this key.
21,116
def revoke(self, target, **prefs):
    """Revoke a key, a subkey, or all current certification signatures of a User ID
    that were generated by this key.
    """
    hash_algo = prefs.pop('hash', None)

    # pick the revocation signature type from the target's kind
    if isinstance(target, PGPUID):
        sig_type = SignatureType.CertRevocation
    elif isinstance(target, PGPKey):
        sig_type = SignatureType.KeyRevocation if target.is_primary else SignatureType.SubkeyRevocation
    else:
        raise TypeError

    sig = PGPSignature.new(sig_type, self.key_algorithm, hash_algo, self.fingerprint.keyid)

    reason = prefs.pop('reason', RevocationReason.NotSpecified)
    comment = prefs.pop('comment', "")
    sig._signature.subpackets.addnew('ReasonForRevocation', hashed=True, code=reason, string=comment)

    return self._sign(target, sig, **prefs)
Revoke a key, a subkey, or all current certification signatures of a User ID that were generated by this key so far.
21,117
def revoker(self, revoker, **prefs):
    """Generate a signature that designates another key as valid for revoking this key."""
    hash_algo = prefs.pop('hash', None)
    sig = PGPSignature.new(SignatureType.DirectlyOnKey, self.key_algorithm, hash_algo, self.fingerprint.keyid)

    sensitive = prefs.pop('sensitive', False)
    keyclass = RevocationKeyClass.Normal | (RevocationKeyClass.Sensitive if sensitive else 0x00)
    sig._signature.subpackets.addnew('RevocationKey',
                                     hashed=True,
                                     algorithm=revoker.key_algorithm,
                                     fingerprint=revoker.fingerprint,
                                     keyclass=keyclass)

    # revoker designations must not themselves be revocable
    prefs['revocable'] = False
    return self._sign(self, sig, **prefs)
Generate a signature that specifies another key as being valid for revoking this key .
21,118
def bind(self, key, **prefs):
    """Bind a subkey to this key."""
    hash_algo = prefs.pop('hash', None)

    if self.is_primary and not key.is_primary:
        sig_type = SignatureType.Subkey_Binding
    elif key.is_primary and not self.is_primary:
        sig_type = SignatureType.PrimaryKey_Binding
    else:
        raise PGPError

    sig = PGPSignature.new(sig_type, self.key_algorithm, hash_algo, self.fingerprint.keyid)

    if sig_type == SignatureType.Subkey_Binding:
        usage = prefs.pop('usage', None)
        if usage is not None:
            sig._signature.subpackets.addnew('KeyFlags', hashed=True, flags=usage)

        # signing-capable subkeys must embed a cross-certification back-signature
        if key.key_algorithm.can_sign:
            subkeyid = key.fingerprint.keyid
            esig = None
            if not key.is_public:
                esig = key.bind(self)
            elif subkeyid in self.subkeys:
                esig = self.subkeys[subkeyid].bind(self)
            if esig is not None:
                sig._signature.subpackets.addnew('EmbeddedSignature', hashed=False, _sig=esig._signature)

    return self._sign(key, sig, **prefs)
Bind a subkey to this key .
21,119
def verify(self, subject, signature=None):
    """Verify a subject with a signature using this key.

    When ``signature`` is None, all signatures attached to ``subject`` that
    were made by this key (or one of its subkeys) are verified.
    """
    sspairs = []

    # type-check the arguments up front
    if not isinstance(subject, (type(None), PGPMessage, PGPKey, PGPUID, PGPSignature,
                                six.string_types, bytes, bytearray)):
        raise TypeError("Unexpected subject value: {:s}".format(str(type(subject))))
    if not isinstance(signature, (type(None), PGPSignature)):
        raise TypeError("Unexpected signature value: {:s}".format(str(type(signature))))

    def _filter_sigs(sigs):
        # keep only signatures made by this key or its subkeys
        _ids = {self.fingerprint.keyid} | set(self.subkeys)
        return [sig for sig in sigs if sig.signer in _ids]

    if signature is None:
        # collect every (signature, subject) pair we can verify
        if isinstance(subject, PGPMessage):
            sspairs += [(sig, subject.message) for sig in _filter_sigs(subject.signatures)]
        if isinstance(subject, (PGPUID, PGPKey)):
            sspairs += [(sig, subject) for sig in _filter_sigs(subject.__sig__)]
        if isinstance(subject, PGPKey):
            sspairs += [(sig, uid) for uid in subject.userids for sig in _filter_sigs(uid.__sig__)]
            sspairs += [(sig, ua) for ua in subject.userattributes for sig in _filter_sigs(ua.__sig__)]
            sspairs += [(sig, subkey) for subkey in subject.subkeys.values() for sig in _filter_sigs(subkey.__sig__)]
    elif signature.signer in {self.fingerprint.keyid} | set(self.subkeys):
        sspairs += [(signature, subject)]

    if len(sspairs) == 0:
        raise PGPError("No signatures to verify")

    sigv = SignatureVerification()
    for sig, subj in sspairs:
        if self.fingerprint.keyid != sig.signer and sig.signer in self.subkeys:
            # delegate to the subkey that actually made the signature
            warnings.warn("Signature was signed with this key's subkey: {:s}. "
                          "Verifying with subkey...".format(sig.signer),
                          stacklevel=2)
            sigv &= self.subkeys[sig.signer].verify(subj, sig)
        else:
            verified = self._key.verify(sig.hashdata(subj), sig.__sig__,
                                        getattr(hashes, sig.hash_algorithm.name)())
            if verified is NotImplemented:
                raise NotImplementedError(sig.key_algorithm)
            sigv.add_sigsubj(sig, self.fingerprint.keyid, subj, verified)

    return sigv
Verify a subject with a signature using this key .
21,120
def encrypt(self, message, sessionkey=None, **prefs):
    """Encrypt a PGPMessage using this key."""
    user = prefs.pop('user', None)

    # find a User ID whose preferences drive algorithm selection
    uid = None
    if user is not None:
        uid = self.get_uid(user)
    else:
        uid = next(iter(self.userids), None)
        if uid is None and self.parent is not None:
            uid = next(iter(self.parent.userids), None)

    cipher_algo = prefs.pop('cipher', uid.selfsig.cipherprefs[0])
    if cipher_algo not in uid.selfsig.cipherprefs:
        warnings.warn("Selected symmetric algorithm not in key preferences", stacklevel=3)
    if message.is_compressed and message._compression not in uid.selfsig.compprefs:
        warnings.warn("Selected compression algorithm not in key preferences", stacklevel=3)

    if sessionkey is None:
        sessionkey = cipher_algo.gen_key()

    # build the public-key-encrypted session key packet
    pkesk = PKESessionKeyV3()
    pkesk.encrypter = bytearray(binascii.unhexlify(self.fingerprint.keyid.encode('latin-1')))
    pkesk.pkalg = self.key_algorithm
    pkesk.encrypt_sk(self._key, cipher_algo, sessionkey)

    if message.is_encrypted:
        # already encrypted for someone else: just add another session key packet
        _m = message
    else:
        _m = PGPMessage()
        skedata = IntegrityProtectedSKEDataV1()
        skedata.encrypt(sessionkey, cipher_algo, message.__bytes__())
        _m |= skedata

    _m |= pkesk
    return _m
Encrypt a PGPMessage using this key .
21,121
def decrypt(self, message):
    """Decrypt a PGPMessage using this key."""
    if not message.is_encrypted:
        warnings.warn("This message is not encrypted", stacklevel=3)
        return message

    if self.fingerprint.keyid not in message.encrypters:
        # maybe one of our subkeys is the intended recipient
        sks = set(self.subkeys)
        mis = set(message.encrypters)
        if sks & mis:
            skid = list(sks & mis)[0]
            warnings.warn("Message was encrypted with this key's subkey: {:s}. "
                          "Decrypting with that...".format(skid),
                          stacklevel=2)
            return self.subkeys[skid].decrypt(message)
        raise PGPError("Cannot decrypt the provided message with this key")

    pkesk = next(pk for pk in message._sessionkeys
                 if pk.pkalg == self.key_algorithm and pk.encrypter == self.fingerprint.keyid)
    alg, key = pkesk.decrypt_sk(self._key)

    decmsg = PGPMessage()
    decmsg.parse(message.message.decrypt(key, alg))
    return decmsg
Decrypt a PGPMessage using this key .
21,122
def load(self, *args):
    """Load all keys provided into this keyring object.

    Arguments may be file paths, key blobs, or lists/tuples of either.
    Returns the list of loaded fingerprints.
    """
    def _preiter(first, iterable):
        # yield the primary key first, then everything else
        yield first
        for item in iterable:
            yield item

    loaded = set()
    # flatten args: each element may be a scalar or a list/tuple of keys
    for key in iter(item
                    for ilist in iter(ilist if isinstance(ilist, (tuple, list)) else [ilist]
                                      for ilist in args)
                    for item in ilist):
        if os.path.isfile(key):
            _key, keys = PGPKey.from_file(key)
        else:
            _key, keys = PGPKey.from_blob(key)

        for ik in _preiter(_key, keys.values()):
            self._add_key(ik)
            loaded |= {ik.fingerprint} | {isk.fingerprint for isk in ik.subkeys.values()}

    return list(loaded)
Load all keys provided into this keyring object .
21,123
def fingerprints(self, keyhalf='any', keytype='any'):
    """List loaded fingerprints with some optional filtering.

    :param keyhalf: 'public', 'private', or 'any'
    :param keytype: 'primary', 'sub', or 'any'
    """
    primary_ok = keytype in ('primary', 'any')
    sub_ok = keytype in ('sub', 'any')
    public_ok = keyhalf in ('public', 'any')
    private_ok = keyhalf in ('private', 'any')

    out = set()
    for pk in self._keys.values():
        type_match = primary_ok if pk.is_primary else sub_ok
        half_match = public_ok if pk.is_public else private_ok
        if type_match and half_match:
            out.add(pk.fingerprint)
    return out
List loaded fingerprints with some optional filtering .
21,124
def unload(self, key):
    """Unload a loaded key and its subkeys."""
    assert isinstance(key, PGPKey)
    pkid = id(key)
    if pkid in self._keys:
        # remove the key from the public/private tracking sets
        for kd in (self._pubkeys, self._privkeys):
            if pkid in kd:
                kd.remove(pkid)
        self._keys.pop(pkid)

        # drop any aliases pointing at this key; materialize first to avoid
        # mutating the mappings while scanning them
        stale = [(m, a) for m in self._aliases for a, p in m.items() if p == pkid]
        for m, a in stale:
            m.pop(a)
            if a in self:
                self._sort_alias(a)

        # a primary key takes its subkeys with it
        if key.is_primary:
            for sk in key.subkeys.values():
                self.unload(sk)
Unload a loaded key and its subkeys .
21,125
def parse(self, packet):
    """Parse a packet header. There are two formats for headers (old and new)."""
    # bit 6 of the first octet selects new (1) vs old (0) length format
    self._lenfmt = ((packet[0] & 0x40) >> 6)
    self.tag = packet[0]
    if self._lenfmt == 0:
        # old format: low two bits encode the length-of-length
        self.llen = (packet[0] & 0x03)
    del packet[0]

    if (self._lenfmt == 0 and self.llen > 0) or self._lenfmt == 1:
        self.length = packet
    else:
        # old format with indeterminate length: everything that remains
        self.length = len(packet)
There are two formats for headers
21,126
def clear(self):
    """Delete and re-initialize all private components to zero."""
    for field in self.__privfields__:
        # delete first so the descriptor/attribute is fully reset before re-zeroing
        delattr(self, field)
        setattr(self, field, MPI(0))
delete and re - initialize all private components to zero
21,127
def ascii_unarmor(text):
    """Take an ASCII-armored PGP block and return the decoded byte value.

    Returns a dict with 'magic', 'headers', 'body' (bytearray) and 'crc'.
    Raises ValueError if the input looks ASCII but is not armored, and
    PGPError if the base64 body cannot be decoded.
    """
    m = {'magic': None, 'headers': None, 'body': bytearray(), 'crc': None}
    if not Armorable.is_ascii(text):
        # binary input: nothing to unarmor, pass it through as the body
        m['body'] = bytearray(text)
        return m

    if isinstance(text, (bytes, bytearray)):
        text = text.decode('latin-1')

    m = Armorable.__armor_regex.search(text)
    if m is None:
        raise ValueError("Expected: ASCII-armored PGP data")
    m = m.groupdict()

    if m['hashes'] is not None:
        m['hashes'] = m['hashes'].split(',')

    if m['headers'] is not None:
        m['headers'] = collections.OrderedDict(re.findall('^(?P<key>.+): (?P<value>.+)$\n?', m['headers'], flags=re.MULTILINE))

    if m['body'] is not None:
        try:
            m['body'] = bytearray(base64.b64decode(m['body'].encode()))
        except (binascii.Error, TypeError) as ex:
            # bugfix: previously an unreachable duplicate raise discarded the
            # error message; raise once, preserving the cause and the message
            six.raise_from(PGPError(str(ex)), ex)

    if m['crc'] is not None:
        m['crc'] = Header.bytes_to_int(base64.b64decode(m['crc'].encode()))
        if Armorable.crc24(m['body']) != m['crc']:
            warnings.warn('Incorrect crc24', stacklevel=3)

    return m
Takes an ASCII - armored PGP block and returns the decoded byte value .
21,128
def bytes_to_int(b, order='big'):
    """Convert bytes to an integer."""
    if six.PY2:
        # Python 2 has no int.from_bytes; accumulate byte-by-byte
        _b = b.__class__()
        if order != 'little':
            b = reversed(b)
        if not isinstance(_b, bytearray):
            b = six.iterbytes(b)
        return sum(c << (i * 8) for i, c in enumerate(b))
    return int.from_bytes(b, order)
convert bytes to integer
21,129
def int_to_bytes(i, minlen=1, order='big'):
    """Convert an integer to bytes, zero-padded to at least ``minlen`` bytes."""
    blen = max(minlen, PGPObject.int_byte_len(i), 1)
    if six.PY2:
        # Python 2 has no int.to_bytes; emit one shifted byte at a time
        shifts = iter(_ * 8 for _ in (range(blen) if order == 'little' else range(blen - 1, -1, -1)))
        return bytes(bytearray((i >> c) & 0xff for c in shifts))
    return i.to_bytes(blen, order)
convert integer to bytes
21,130
def check(self):
    """Re-sort any items in self that are not sorted."""
    for pos in range(len(self) - 2):
        # NOTE(review): range stops two short of the end, so the final pair is
        # never compared — preserved as-is from the original.
        if not operator.le(self[pos], self[pos + 1]):
            self.resort(self[pos])
re - sort any items in self that are not sorted
21,131
def sdmethod(meth):
    """Hack to monkey-patch singledispatch so it works as expected with instance methods.

    Dispatch is performed on the first *positional* argument after ``self``
    rather than on ``self`` itself.
    """
    sd = singledispatch(meth)

    def wrapper(obj, *args, **kwargs):
        # dispatch on args[0], not obj
        return sd.dispatch(args[0].__class__)(obj, *args, **kwargs)

    # expose the singledispatch machinery on the wrapper
    wrapper.register = sd.register
    wrapper.dispatch = sd.dispatch
    wrapper.registry = sd.registry
    wrapper._clear_cache = sd._clear_cache
    functools.update_wrapper(wrapper, meth)
    return wrapper
This is a hack to monkey patch sdproperty to work as expected with instance methods .
21,132
def mixedcase(path):
    """Remove underscores and capitalize the neighbouring character (snake_case -> camelCase)."""
    first, *rest = path.split('_')
    return first + ''.join(part.title() for part in rest)
Removes underscores and capitalizes the neighbouring character
21,133
def _log ( self , message , debug = None , ** kwargs ) : display_log = self . debug if debug is not None : display_log = debug if display_log : print ( message . format ( ** kwargs ) )
Outputs a formatted message in the console if the debug mode is activated .
21,134
def send_request(self, *args, **kwargs):
    """Wrapper for ``session.request``.

    Handles a connection-reset error (even from pyopenssl) by closing the
    session and retrying once.
    """
    try:
        return self.session.request(*args, **kwargs)
    except ConnectionError:
        # stale connection: reset the session and retry a single time
        self.session.close()
        return self.session.request(*args, **kwargs)
Wrapper for session . request Handle connection reset error even from pyopenssl
21,135
def address(self):
    """The full proxied address to this page."""
    path = urlsplit(self.target).path
    # keep a trailing slash when the target has one (or no path at all)
    suffix = '/' if not path or path.endswith('/') else ''
    return '%s%s/%s%s' % (self._ui_address[:-1], self._proxy_prefix, self.route, suffix)
The full proxied address to this page
21,136
def add_page(self, route, target, link_name=None):
    """Add a new proxied page to the Web UI; returns the resulting ProxiedPage."""
    req = proto.Proxy(route=route, target=target, link_name=link_name)
    self._client._call('AddProxy', req)
    return ProxiedPage(route, target, link_name, self.address, self.proxy_prefix)
Add a new proxied page to the Web UI .
21,137
def remove_page(self, route):
    """Remove a proxied page from the Web UI."""
    self._client._call('RemoveProxy', proto.RemoveProxyRequest(route=route))
Remove a proxied page from the Web UI .
21,138
def get_pages(self):
    """Get all registered pages, keyed by route."""
    resp = self._client._call('GetProxies', proto.GetProxiesRequest())
    pages = {}
    for entry in resp.proxy:
        # an empty link_name from the wire is normalized to None
        pages[entry.route] = ProxiedPage(entry.route,
                                         entry.target,
                                         entry.link_name if entry.link_name else None,
                                         self.address,
                                         self.proxy_prefix)
    return pages
Get all registered pages .
21,139
async def tcp_echo_client(message, loop, host, port):
    """Generic python tcp echo client."""
    print("Connecting to server at %s:%d" % (host, port))
    reader, writer = await asyncio.open_connection(host, port, loop=loop)

    writer.write(message.encode())
    print('Sent: %r' % message)

    data = await reader.read(100)
    print('Received: %r' % data.decode())

    writer.close()
Generic python tcp echo client
21,140
async def echo_all(app, message):
    """Send and receive a message from all running echo servers."""
    # each value in the kv store is a b"host:port" address
    for address in app.kv.get_prefix('address.').values():
        host, port = address.decode().split(':')
        await tcp_echo_client(message, loop, host, int(port))
Send and receive a message from all running echo servers
21,141
def from_global_driver(self):
    """Connect to the global driver.

    Raises DriverNotRunningError if no driver is currently running.
    """
    address, _ = _read_driver()
    if address is None:
        raise DriverNotRunningError("No driver currently running")
    return Client(address=address, security=Security.from_default())
Connect to the global driver .
21,142
def start_global_driver(keytab=None, principal=None, log=None, log_level=None, java_options=None):
    """Start the global driver, reusing a live one when possible.

    Returns the driver address.
    """
    address, pid = _read_driver()
    if address is not None:
        try:
            # probe the recorded driver; if it answers, reuse it
            Client(address=address)
            return address
        except ConnectionError:
            if pid_exists(pid):
                # process is alive but unreachable: surface the error
                raise
            context.warn("Previous driver at %s, PID %d has died. Restarting." % (address, pid))

    address, _ = _start_driver(set_global=True,
                               keytab=keytab,
                               principal=principal,
                               log=log,
                               log_level=log_level,
                               java_options=java_options)
    return address
Start the global driver .
21,143
def stop_global_driver(force=False):
    """Stop the global driver if it is running.

    With ``force`` set, permission errors while killing are ignored too.
    """
    address, pid = _read_driver()
    if address is None:
        return

    if not force:
        try:
            # make sure the recorded pid really is our driver before killing it
            Client(address=address)
        except ConnectionError:
            if pid_exists(pid):
                raise

    try:
        os.kill(pid, signal.SIGTERM)
    except OSError as exc:
        ignore = (errno.ESRCH, errno.EPERM) if force else (errno.ESRCH,)
        if exc.errno not in ignore:
            raise

    # best-effort cleanup of the driver record file
    try:
        os.remove(os.path.join(properties.config_dir, 'driver'))
    except OSError:
        pass
Stops the global driver if running .
21,144
def close(self):
    """Close the java driver if it was started by this client; no-op otherwise."""
    if self._proc is None:
        return
    self._proc.stdin.close()
    self._proc.wait()
Closes the java driver if started by this client . No - op otherwise .
21,145
def submit(self, spec):
    """Submit a new skein application; returns the new application id."""
    spec = ApplicationSpec._from_any(spec)
    resp = self._call('submit', spec.to_protobuf())
    return resp.id
Submit a new skein application .
21,146
def submit_and_connect(self, spec):
    """Submit a new skein application and wait to connect to it.

    If connecting fails for any reason, the application is killed before
    re-raising.
    """
    spec = ApplicationSpec._from_any(spec)
    app_id = self.submit(spec)
    try:
        return self.connect(app_id, security=spec.master.security)
    except BaseException:
        # don't leave an orphaned application behind
        self.kill_application(app_id)
        raise
Submit a new skein application and wait to connect to it .
21,147
def connect(self, app_id, wait=True, security=None):
    """Connect to a running application.

    With ``wait`` set, blocks until the application has started.
    Raises ApplicationNotRunningError if the application is not RUNNING.
    """
    method = 'waitForStart' if wait else 'getStatus'
    resp = self._call(method, proto.Application(id=app_id))
    report = ApplicationReport.from_protobuf(resp)
    if report.state is not ApplicationState.RUNNING:
        raise ApplicationNotRunningError("%s is not running. Application state: "
                                         "%s" % (app_id, report.state))

    if security is None:
        security = self.security

    return ApplicationClient('%s:%d' % (report.host, report.port), app_id, security=security)
Connect to a running application .
21,148
def get_applications(self, states=None, name=None, user=None, queue=None,
                     started_begin=None, started_end=None,
                     finished_begin=None, finished_end=None):
    """Get the status of current skein applications, sorted by id.

    Defaults to applications that are SUBMITTED, ACCEPTED or RUNNING.
    """
    if states is not None:
        states = tuple(ApplicationState(s) for s in states)
    else:
        states = (ApplicationState.SUBMITTED,
                  ApplicationState.ACCEPTED,
                  ApplicationState.RUNNING)

    # normalize all datetime-ish filters up front
    started_begin = self._parse_datetime(started_begin, 'started_begin')
    started_end = self._parse_datetime(started_end, 'started_end')
    finished_begin = self._parse_datetime(finished_begin, 'finished_begin')
    finished_end = self._parse_datetime(finished_end, 'finished_end')

    req = proto.ApplicationsRequest(states=[str(s) for s in states],
                                    name=name,
                                    user=user,
                                    queue=queue,
                                    started_begin=datetime_to_millis(started_begin),
                                    started_end=datetime_to_millis(started_end),
                                    finished_begin=datetime_to_millis(finished_begin),
                                    finished_end=datetime_to_millis(finished_end))
    resp = self._call('getApplications', req)
    return sorted((ApplicationReport.from_protobuf(r) for r in resp.reports),
                  key=lambda x: x.id)
Get the status of current skein applications .
21,149
def get_nodes(self, states=None):
    """Get the status of nodes in the cluster, sorted by id."""
    if states is not None:
        states = tuple(NodeState(s) for s in states)
    else:
        states = ()  # empty tuple means no state filtering
    req = proto.NodesRequest(states=[str(s) for s in states])
    resp = self._call('getNodes', req)
    return sorted((NodeReport.from_protobuf(r) for r in resp.reports),
                  key=lambda x: x.id)
Get the status of nodes in the cluster .
21,150
def get_queue(self, name):
    """Get information about a queue."""
    resp = self._call('getQueue', proto.QueueRequest(name=name))
    return Queue.from_protobuf(resp)
Get information about a queue .
21,151
def get_child_queues(self, name):
    """Get information about all children of a parent queue."""
    resp = self._call('getChildQueues', proto.QueueRequest(name=name))
    return [Queue.from_protobuf(q) for q in resp.queues]
Get information about all children of a parent queue .
21,152
def get_all_queues(self):
    """Get information about all queues in the cluster."""
    resp = self._call('getAllQueues', proto.Empty())
    return [Queue.from_protobuf(q) for q in resp.queues]
Get information about all queues in the cluster .
21,153
def application_report(self, app_id):
    """Get a report on the status of a skein application."""
    resp = self._call('getStatus', proto.Application(id=app_id))
    return ApplicationReport.from_protobuf(resp)
Get a report on the status of a skein application .
21,154
def move_application(self, app_id, queue):
    """Move an application to a different queue."""
    self._call('moveApplication', proto.MoveRequest(id=app_id, queue=queue))
Move an application to a different queue .
21,155
def kill_application(self, app_id, user=""):
    """Kill an application, optionally on behalf of another user."""
    self._call('kill', proto.KillRequest(id=app_id, user=user))
Kill an application .
21,156
def shutdown(self, status='SUCCEEDED', diagnostics=None):
    """Shut down the application with the given final status and diagnostics."""
    req = proto.ShutdownRequest(final_status=str(FinalStatus(status)),
                                diagnostics=diagnostics)
    self._call('shutdown', req)
Shutdown the application .
21,157
def get_specification(self):
    """Get the specification for the running application."""
    resp = self._call('getApplicationSpec', proto.Empty())
    return ApplicationSpec.from_protobuf(resp)
Get the specification for the running application .
21,158
def scale(self, service, count=None, delta=None, **kwargs):
    """Scale a service to a requested number of instances.

    Exactly one of ``count`` (absolute) or ``delta`` (relative) must be given.
    Returns the list of affected containers.
    """
    # 'instances' is the deprecated spelling of 'count'
    if 'instances' in kwargs:
        count = kwargs.pop('instances')
        warnings.warn("instances is deprecated, use count instead")
    assert not kwargs

    if count is not None and delta is not None:
        raise context.ValueError("cannot specify both `count` and `delta`")
    elif count is None and delta is None:
        raise context.ValueError("must specify either `count` or `delta`")
    if count and count < 0:
        raise context.ValueError("count must be >= 0")

    req = proto.ScaleRequest(service_name=service, count=count, delta=delta)
    resp = self._call('scale', req)
    return [Container.from_protobuf(c) for c in resp.containers]
Scale a service to a requested number of instances .
21,159
def set_progress(self, progress):
    """Update the progress for this application; must be in [0, 1]."""
    if not (0 <= progress <= 1.0):
        raise ValueError("progress must be between 0 and 1, got %.3f" % progress)
    self._call('SetProgress', proto.SetProgressRequest(progress=progress))
Update the progress for this application .
21,160
def from_current(cls):
    """Create an application client from within a running container."""
    if properties.application_id is None:
        raise context.ValueError("Not running inside a container")
    return cls(properties.appmaster_address,
               properties.application_id,
               security=Security.from_default())
Create an application client from within a running container .
21,161
def get_containers(self, services=None, states=None):
    """Get information on containers in this application, sorted by (service, instance)."""
    if services is not None:
        services = set(services)
    if states is not None:
        states = [str(ContainerState(s)) for s in states]
    req = proto.ContainersRequest(services=services, states=states)
    resp = self._call('getContainers', req)
    return sorted((Container.from_protobuf(c) for c in resp.containers),
                  key=lambda c: (c.service_name, c.instance))
Get information on containers in this application .
21,162
def from_protobuf(cls, msg):
    """Create an instance from a protobuf message.

    Raises TypeError if ``msg`` is not of the expected protobuf class.
    """
    if not isinstance(msg, cls._protobuf_cls):
        raise TypeError("Expected message of type "
                        "%r" % cls._protobuf_cls.__name__)
    kwargs = {k: getattr(msg, k) for k in cls._get_params()}
    return cls(**kwargs)
Create an instance from a protobuf message .
21,163
def to_protobuf(self):
    """Convert object to a protobuf message (validating first)."""
    self._validate()
    fields = {}
    for k in self._get_params():
        fields[k] = _convert(getattr(self, k), 'to_protobuf')
    return self._protobuf_cls(**fields)
Convert object to a protobuf message
21,164
def to_dict(self, skip_nulls=True):
    """Convert object to a dict, omitting None values unless ``skip_nulls`` is False."""
    self._validate()
    return {k: _convert(v, 'to_dict', skip_nulls)
            for k in self._get_params()
            for v in (getattr(self, k),)
            if not skip_nulls or v is not None}
Convert object to a dict
21,165
def to_json(self, skip_nulls=True):
    """Convert object to a json string."""
    return json.dumps(self.to_dict(skip_nulls=skip_nulls))
Convert object to a json string
21,166
def to_yaml(self, skip_nulls=True):
    """Convert object to a yaml string."""
    data = self.to_dict(skip_nulls=skip_nulls)
    return yaml.safe_dump(data, default_flow_style=False)
Convert object to a yaml string
21,167
def build_html():
    """Build the html to be served by IndexHandler."""
    source = AjaxDataSource(data_url='./data', polling_interval=INTERVAL, method='GET')

    # main OHLC price chart
    p = figure(plot_height=400,
               title='OHLC',
               sizing_mode='scale_width',
               tools="xpan,xwheel_zoom,xbox_zoom,reset",
               x_axis_type=None,
               y_axis_location="right",
               y_axis_label="Price ($)")
    p.x_range.follow = "end"
    p.x_range.follow_interval = 100
    p.x_range.range_padding = 0

    p.line(x='time', y='average', alpha=0.25, line_width=3, color='black', source=source)
    p.line(x='time', y='ma', alpha=0.8, line_width=2, color='steelblue', source=source)
    p.segment(x0='time', y0='low', x1='time', y1='high', line_width=2, color='black', source=source)
    p.segment(x0='time', y0='open', x1='time', y1='close', line_width=8, color='color', source=source, alpha=0.8)

    # MACD chart, sharing the x range with the price chart
    p2 = figure(plot_height=200,
                title='MACD',
                sizing_mode='scale_width',
                x_range=p.x_range,
                x_axis_label='Time (s)',
                tools="xpan,xwheel_zoom,xbox_zoom,reset",
                y_axis_location="right")
    p2.line(x='time', y='macd', color='darkred', line_width=2, source=source)
    p2.line(x='time', y='macd9', color='navy', line_width=2, source=source)
    p2.segment(x0='time', y0=0, x1='time', y1='macdh', line_width=6, color='steelblue', alpha=0.5, source=source)

    plot = gridplot([[p], [p2]], toolbar_location="left", plot_width=1000)
    script, div = components(plot, theme=theme)
    return template.render(resources=CDN.render(), script=script, div=div)
Build the html to be served by IndexHandler
21,168
def update(self):
    """Compute the next synthetic OHLC point and append it to the plot data."""
    # Advance the clock by one polling period (INTERVAL is in ms).
    self.t += 1000 / INTERVAL
    # Random-walk the average price; derive high/low/open/close around it.
    self.average *= np.random.lognormal(0, 0.04)
    high = self.average * np.exp(np.abs(np.random.gamma(1, 0.03)))
    low = self.average / np.exp(np.abs(np.random.gamma(1, 0.03)))
    delta = high - low
    open = low + delta * np.random.uniform(0.05, 0.95)
    close = low + delta * np.random.uniform(0.05, 0.95)
    # Green candle when the price closed above the open.
    color = "darkgreen" if open < close else "darkred"
    for k, point in [('time', self.t), ('average', self.average),
                     ('open', open), ('high', high), ('low', low),
                     ('close', close), ('color', color)]:
        self.data[k].append(point)
    # MACD = EMA(12) - EMA(26); signal line is the EMA(9) of MACD.
    # kernel12/kernel26/kernel9 are presumably precomputed EMA kernels —
    # see _ema for details.
    ema12 = self._ema(self.data['close'], self.kernel12)
    ema26 = self._ema(self.data['close'], self.kernel26)
    macd = ema12 - ema26
    self.data['ma'].append(ema12)
    self.data['macd'].append(macd)
    macd9 = self._ema(self.data['macd'], self.kernel9)
    self.data['macd9'].append(macd9)
    self.data['macdh'].append(macd - macd9)
Compute the next element in the stream and update the plot data
21,169
def container_instance_from_string(id):
    """Parse ``'<service>_<instance>'`` into a ContainerInstance message.

    Splits on the last underscore so service names may themselves contain
    underscores; raises context.ValueError for malformed ids.
    """
    try:
        service_name, instance_str = id.rsplit('_', 1)
        instance_no = int(instance_str)
    except (TypeError, ValueError):
        raise context.ValueError("Invalid container id %r" % id)
    return _proto.ContainerInstance(service_name=service_name, instance=instance_no)
Create a ContainerInstance from an id string
21,170
def parse_memory(s):
    """Convert a byte-size expression (e.g. ``'2 GiB'``) to whole mebibytes.

    Accepts an int (taken as MiB directly), a float (MiB, rounded up), or a
    string with an optional unit suffix looked up in ``_byte_sizes``.
    Raises context.ValueError / context.TypeError on bad input.
    """
    if isinstance(s, integer):
        out = s
    elif isinstance(s, float):
        out = math_ceil(s)
    elif isinstance(s, string):
        s = s.replace(' ', '')
        if not s:
            raise context.ValueError("Could not interpret %r as a byte unit" % s)
        if s[0].isdigit():
            # Scan from the end to split the alphabetic unit suffix off the
            # numeric prefix (e.g. '1.5GiB' -> '1.5', 'GiB').
            for i, c in enumerate(reversed(s)):
                if not c.isalpha():
                    break
            index = len(s) - i
            prefix = s[:index]
            suffix = s[index:]
            try:
                n = float(prefix)
            except ValueError:
                raise context.ValueError("Could not interpret %r as a number" % prefix)
        else:
            # Bare unit (e.g. 'GiB') means one of that unit.
            n = 1
            suffix = s
        try:
            multiplier = _byte_sizes[suffix.lower()]
        except KeyError:
            raise context.ValueError("Could not interpret %r as a byte unit" % suffix)
        # Convert bytes -> MiB, rounding up to a whole mebibyte.
        out = math_ceil(n * multiplier / (2 ** 20))
    else:
        raise context.TypeError("memory must be an integer, got %r" % type(s).__name__)
    if out < 0:
        raise context.ValueError("memory must be positive")
    return out
Converts bytes expression to number of mebibytes .
21,171
def from_default(cls):
    """Resolve the default security credentials.

    Inside a running application container, load ``.skein.{crt,pem}`` from
    the container dir; otherwise load from (or create in) the global config
    dir.  Raises context.FileNotFoundError when in-container credentials
    are missing.
    """
    from .core import properties
    if properties.application_id is not None:
        # Running inside a container: credentials must already have been
        # localized next to the container dir by YARN.
        if properties.container_dir is not None:
            cert_path = os.path.join(properties.container_dir, '.skein.crt')
            key_path = os.path.join(properties.container_dir, '.skein.pem')
            if os.path.exists(cert_path) and os.path.exists(key_path):
                return Security(cert_file=cert_path, key_file=key_path)
        raise context.FileNotFoundError(
            "Failed to resolve .skein.{crt,pem} in 'LOCAL_DIRS'")
    try:
        return cls.from_directory(properties.config_dir)
    except FileNotFoundError:
        pass
    # No credentials on disk yet: generate and persist new ones.  If a
    # concurrent process won the race, fall back to reading what it wrote.
    new = cls.new_credentials()
    try:
        out = new.to_directory(properties.config_dir)
        context.warn("Skein global security credentials not found, "
                     "writing now to %r." % properties.config_dir)
    except FileExistsError:
        out = cls.from_directory(properties.config_dir)
    return out
The default security configuration .
21,172
def from_directory(cls, directory):
    """Load a Security object from ``skein.crt``/``skein.pem`` in *directory*.

    Raises context.FileNotFoundError if either file is missing.
    """
    paths = {'cert': os.path.join(directory, 'skein.crt'),
             'key': os.path.join(directory, 'skein.pem')}
    for name in ('cert', 'key'):
        if not os.path.exists(paths[name]):
            raise context.FileNotFoundError(
                "Security %s file not found at %r" % (name, paths[name]))
    return Security(cert_file=paths['cert'], key_file=paths['key'])
Create a security object from a directory .
21,173
def to_directory(self, directory, force=False):
    """Write this security object's cert/key pair into *directory*.

    Files are created with mode 0600 under a file lock; pre-existing files
    raise context.FileExistsError unless *force* is set.  Returns a new
    Security pointing at the written files.
    """
    self._validate()
    makedirs(directory, exist_ok=True)
    cert_path = os.path.join(directory, 'skein.crt')
    key_path = os.path.join(directory, 'skein.pem')
    cert_bytes = self._get_bytes('cert')
    key_bytes = self._get_bytes('key')
    # Serialize writers so a half-written cert/key pair is never observed.
    lock_path = os.path.join(directory, 'skein.lock')
    with lock_file(lock_path):
        for path, name in [(cert_path, 'skein.crt'), (key_path, 'skein.pem')]:
            if os.path.exists(path):
                if force:
                    os.unlink(path)
                else:
                    msg = ("%r file already exists, use `%s` to overwrite"
                           % (name, '--force' if context.is_cli else 'force'))
                    raise context.FileExistsError(msg)
        # O_EXCL + mode 0600: create-only, owner-readable credentials.
        flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
        for path, data in [(cert_path, cert_bytes), (key_path, key_bytes)]:
            with os.fdopen(os.open(path, flags, 0o600), 'wb') as fil:
                fil.write(data)
    return Security(cert_file=cert_path, key_file=key_path)
Write this security object to a directory .
21,174
def _from_any ( cls , spec ) : if isinstance ( spec , str ) : spec = cls . from_file ( spec ) elif isinstance ( spec , dict ) : spec = cls . from_dict ( spec ) elif not isinstance ( spec , cls ) : raise context . TypeError ( "spec must be either an ApplicationSpec, " "path, or dict, got " "%s" % type ( spec ) . __name__ ) return spec
Generic creation method for all types accepted as spec
21,175
def from_file(cls, path, format='infer'):
    """Create an instance from a JSON or YAML file.

    ``format='infer'`` resolves the format from the file extension.
    """
    format = _infer_format(path, format=format)
    # Record the file's directory so relative paths inside the spec can be
    # resolved against it later.
    origin = os.path.abspath(os.path.dirname(path))
    with open(path) as f:
        data = f.read()
    if format == 'json':
        obj = json.loads(data)
    else:
        obj = yaml.safe_load(data)
    return cls.from_dict(obj, _origin=origin)
Create an instance from a json or yaml file .
21,176
def to_file(self, path, format='infer', skip_nulls=True):
    """Serialize this object to *path* as JSON or YAML.

    The format is inferred from the extension unless given explicitly;
    serialization dispatches to the matching ``to_json``/``to_yaml`` method.
    """
    resolved = _infer_format(path, format=format)
    serializer = getattr(self, 'to_' + resolved)
    # Serialize before opening so a failure never leaves an empty file.
    payload = serializer(skip_nulls=skip_nulls)
    with open(path, mode='w') as f:
        f.write(payload)
Write object to a file .
21,177
def lock_file(path):
    """Return the process-wide _FileLock for *path*, creating it on first use.

    The registry itself is guarded by ``_paths_lock`` so concurrent callers
    always share a single lock object per path.
    """
    with _paths_lock:
        try:
            return _paths_to_locks[path]
        except KeyError:
            lock = _paths_to_locks[path] = _FileLock(path)
            return lock
File based lock on path .
21,178
def grpc_fork_support_disabled():
    """Context manager: temporarily disable fork support in gRPC.

    gRPC < 1.18.0 could misbehave after fork when fork support was enabled,
    so it is switched off via ``GRPC_ENABLE_FORK_SUPPORT`` for the duration
    of the block; newer versions need no workaround.
    """
    if LooseVersion(GRPC_VERSION) < '1.18.0':
        key = 'GRPC_ENABLE_FORK_SUPPORT'
        # Save any caller-provided value so it can be restored on exit;
        # the previous code unconditionally deleted the variable, dropping
        # a pre-existing setting.
        previous = os.environ.get(key)
        os.environ[key] = '0'
        try:
            yield
        finally:
            if previous is None:
                del os.environ[key]
            else:
                os.environ[key] = previous
    else:
        yield
Temporarily disable fork support in gRPC .
21,179
def humanize_timedelta(td):
    """Render a timedelta as a short human-readable string.

    Shows hours+minutes, minutes only, or seconds only, depending on the
    largest non-zero unit (seconds are dropped once minutes are shown).
    """
    total = int(td.total_seconds())
    hours, remainder = divmod(total, 60 * 60)
    minutes, seconds = divmod(remainder, 60)
    if hours:
        return '%dh %dm' % (hours, minutes)
    elif minutes:
        return '%dm' % minutes
    return '%ds' % seconds
Pretty-print a timedelta in a human-readable format.
21,180
def datetime_to_millis(x):
    """Convert a datetime to integer milliseconds since the Unix epoch.

    Returns None for None.  Uses ``datetime.timestamp`` when available; the
    fallbacks (for objects without it) interpret naive values as local time
    via ``time.mktime`` and subtract ``_EPOCH`` for aware values.
    """
    if x is None:
        return None
    if hasattr(x, 'timestamp'):
        seconds = x.timestamp()
    elif x.tzinfo is None:
        parts = (x.year, x.month, x.day, x.hour, x.minute, x.second, -1, -1, -1)
        seconds = time.mktime(parts) + x.microsecond / 1e6
    else:
        seconds = (x - _EPOCH).total_seconds()
    return int(seconds * 1000)
Convert a datetime . datetime to milliseconds since the epoch
21,181
def format_table(columns, rows):
    """Format *columns* and *rows* as a left-aligned ASCII table string.

    Headers are upper-cased and every cell is str()-ed.  Each column is
    padded to the widest entry in it (header included); trailing whitespace
    on each line is stripped.  With no rows, only the header line is
    returned.
    """
    str_rows = [tuple(str(cell) for cell in row) for row in rows]
    headers = tuple(str(col).upper() for col in columns)
    if str_rows:
        widths = tuple(max(max(map(len, cells)), len(header))
                       for cells, header in zip(zip(*str_rows), headers))
    else:
        widths = tuple(len(header) for header in headers)
    template = ' '.join('%%-%ds' % w for w in widths)
    lines = [(template % headers).strip()]
    lines.extend((template % row).strip() for row in str_rows)
    return '\n'.join(lines)
Formats an ascii table for given columns and rows .
21,182
def build_attrs(self, *args, **kwargs):
    """Add select2 data attributes to the widget's HTML attrs."""
    attrs = super(Select2Mixin, self).build_attrs(*args, **kwargs)
    # Only optional fields may be cleared back to the empty placeholder.
    if self.is_required:
        attrs.setdefault('data-allow-clear', 'false')
    else:
        attrs.setdefault('data-allow-clear', 'true')
        attrs.setdefault('data-placeholder', '')
    attrs.setdefault('data-minimum-input-length', 0)
    # Tag the element so the select2 JS can find and initialize it.
    if 'class' in attrs:
        attrs['class'] += ' django-select2'
    else:
        attrs['class'] = 'django-select2'
    return attrs
Add select2 data attributes .
21,183
def optgroups(self, name, value, attrs=None):
    """Prepend an empty option so clearable single-value selects work."""
    if not self.is_required and not self.allow_multiple_selected:
        self.choices = list(chain([('', '')], self.choices))
    return super(Select2Mixin, self).optgroups(name, value, attrs=attrs)
Add empty option for clearable selects .
21,184
def _get_media(self):
    """Construct the widget's Media as a dynamic property.

    Picks the select2 i18n file matching the active Django language, but
    only when that translation is listed as available in settings.
    """
    lang = get_language()
    select2_js = (settings.SELECT2_JS,) if settings.SELECT2_JS else ()
    select2_css = (settings.SELECT2_CSS,) if settings.SELECT2_CSS else ()
    i18n_name = SELECT2_TRANSLATIONS.get(lang)
    if i18n_name not in settings.SELECT2_I18N_AVAILABLE_LANGUAGES:
        i18n_name = None
    i18n_file = ('%s/%s.js' % (settings.SELECT2_I18N_PATH, i18n_name),) if i18n_name else ()
    return forms.Media(js=select2_js + i18n_file + ('django_select2/django_select2.js',),
                       css={'screen': select2_css})
Construct Media as a dynamic property .
21,185
def build_attrs(self, *args, **kwargs):
    """Add select2's tag-mode attributes (free-text entry, separators)."""
    self.attrs.setdefault('data-minimum-input-length', 1)
    self.attrs.setdefault('data-tags', 'true')
    # Commas and spaces both terminate a tag while typing.
    self.attrs.setdefault('data-token-separators', '[",", " "]')
    return super(Select2TagMixin, self).build_attrs(*args, **kwargs)
Add select2's tag attributes.
21,186
def build_attrs(self, *args, **kwargs):
    """Set select2's AJAX attributes and register this widget's signed id."""
    attrs = super(HeavySelect2Mixin, self).build_attrs(*args, **kwargs)
    # Signed object id lets the AJAX view look this widget up in the cache
    # without trusting client input.
    self.widget_id = signing.dumps(id(self))
    attrs['data-field_id'] = self.widget_id
    attrs.setdefault('data-ajax--url', self.get_url())
    attrs.setdefault('data-ajax--cache', "true")
    attrs.setdefault('data-ajax--type', "GET")
    attrs.setdefault('data-minimum-input-length', 2)
    if self.dependent_fields:
        attrs.setdefault('data-select2-dependent-fields',
                         " ".join(self.dependent_fields))
    attrs['class'] += ' django-select2-heavy'
    return attrs
Set select2's AJAX attributes.
21,187
def render(self, *args, **kwargs):
    """Render the widget and register it in Django's cache for AJAX lookups."""
    output = super(HeavySelect2Mixin, self).render(*args, **kwargs)
    self.set_to_cache()
    return output
Render widget and register it in Django s cache .
21,188
def set_to_cache(self):
    """Add this widget object to Django's cache so the AJAX view can load it.

    Raises NotImplementedError when the widget cannot be pickled; subclasses
    must then override this with a serialisable representation.
    """
    try:
        cache.set(self._get_cache_key(), {
            'widget': self,
            'url': self.get_url(),
        })
    except (PicklingError, AttributeError):
        msg = "You need to overwrite \"set_to_cache\" or ensure that %s is serialisable."
        raise NotImplementedError(msg % self.__class__.__name__)
Add widget object to Django's cache.
21,189
def set_to_cache(self):
    """Cache this widget's attributes rather than the widget itself.

    The QuerySet is stored as an empty clone plus its query object, which
    is picklable; the AJAX view reassembles the QuerySet from the pair.
    """
    queryset = self.get_queryset()
    cache.set(self._get_cache_key(), {
        'queryset': [
            queryset.none(),
            queryset.query,
        ],
        'cls': self.__class__,
        'search_fields': tuple(self.search_fields),
        'max_results': int(self.max_results),
        'url': str(self.get_url()),
        'dependent_fields': dict(self.dependent_fields),
    })
Add widget's attributes to Django's cache.
21,190
def filter_queryset(self, request, term, queryset=None, **dependent_fields):
    """Return the QuerySet filtered by search_fields matching *term*.

    The term is split on whitespace; tokens are AND-ed together while the
    search-field lookups for each token are OR-ed.  Extra keyword filters
    from dependent fields are AND-ed on top.
    """
    if queryset is None:
        queryset = self.get_queryset()
    search_fields = self.get_search_fields()
    select = Q()
    # Normalize tabs/newlines to spaces before tokenizing.
    term = term.replace('\t', ' ')
    term = term.replace('\n', ' ')
    for t in [t for t in term.split(' ') if not t == '']:
        select &= reduce(lambda x, y: x | Q(**{y: t}), search_fields,
                         Q(**{search_fields[0]: t}))
    if dependent_fields:
        select &= Q(**dependent_fields)
    return queryset.filter(select).distinct()
Return QuerySet filtered by search_fields matching the passed term .
21,191
def get_search_fields(self):
    """Return the list of lookup names used to filter the queryset.

    Raises NotImplementedError when the subclass did not define any.
    """
    if not self.search_fields:
        raise NotImplementedError('%s, must implement "search_fields".'
                                  % self.__class__.__name__)
    return self.search_fields
Return list of lookup names .
21,192
def optgroups(self, name, value, attrs=None):
    """Return optgroups containing only the currently-selected options.

    For a ModelChoiceIterator the full queryset is never rendered; only the
    selected objects are fetched and emitted (select2 loads the rest over
    AJAX).  Falls back to default behaviour for plain choice lists.
    """
    default = (None, [], 0)
    groups = [default]
    has_selected = False
    selected_choices = {str(v) for v in value}
    # Clearable single selects get a leading empty option.
    if not self.is_required and not self.allow_multiple_selected:
        default[1].append(self.create_option(name, '', '', False, 0))
    if not isinstance(self.choices, ModelChoiceIterator):
        return super(ModelSelect2Mixin, self).optgroups(name, value, attrs=attrs)
    # Drop empty values before querying the database.
    selected_choices = {c for c in selected_choices
                        if c not in self.choices.field.empty_values}
    field_name = self.choices.field.to_field_name or 'pk'
    query = Q(**{'%s__in' % field_name: selected_choices})
    for obj in self.choices.queryset.filter(query):
        option_value = self.choices.choice(obj)[0]
        option_label = self.label_from_instance(obj)
        # Only one option may be marked selected unless multiple is allowed.
        selected = (str(option_value) in value and
                    (has_selected is False or self.allow_multiple_selected))
        if selected is True and has_selected is False:
            has_selected = True
    index = len(default[1])
        subgroup = default[1]
        subgroup.append(self.create_option(name, option_value, option_label,
                                           selected_choices, index))
    return groups
Return only selected options and set QuerySet from ModelChoicesIterator .
21,193
def get_queryset(self):
    """Return the cached widget's QuerySet filtered by term and dependent fields."""
    # Map non-empty GET values of dependent form fields onto their model
    # field names so they can be applied as extra filters.
    kwargs = {
        model_field_name: self.request.GET.get(form_field_name)
        for form_field_name, model_field_name in self.widget.dependent_fields.items()
        if form_field_name in self.request.GET
        and self.request.GET.get(form_field_name, '') != ''
    }
    return self.widget.filter_queryset(self.request, self.term, self.queryset, **kwargs)
Get QuerySet from cached widget .
21,194
def get_widget_or_404(self):
    """Load the widget registered for this request's signed ``field_id``.

    Raises Http404 when the id is missing, fails signature validation, has
    expired from the cache, or was issued for a different URL.
    """
    field_id = self.kwargs.get('field_id', self.request.GET.get('field_id', None))
    if not field_id:
        raise Http404('No "field_id" provided.')
    try:
        key = signing.loads(field_id)
    except BadSignature:
        raise Http404('Invalid "field_id".')
    else:
        cache_key = '%s%s' % (settings.SELECT2_CACHE_PREFIX, key)
        widget_dict = cache.get(cache_key)
        if widget_dict is None:
            raise Http404('field_id not found')
        if widget_dict.pop('url') != self.request.path:
            raise Http404('field_id was issued for the view.')
    # Rebuild the QuerySet from the cached (empty clone, query) pair.
    qs, qs.query = widget_dict.pop('queryset')
    self.queryset = qs.all()
    widget_dict['queryset'] = self.queryset
    widget_cls = widget_dict.pop('cls')
    return widget_cls(**widget_dict)
Get and return widget from cache .
21,195
def load_widget(path):
    """Import and return the custom widget class at dotted *path*.

    Raises ImproperlyConfigured when the path has no dot, the module cannot
    be imported, or the module lacks the named attribute.
    """
    # rpartition replaces the old rfind-based split, which for a dotless
    # path produced module=path[:-1] and attr=path — a nonsense import
    # error.  Fail early with a clear message instead.
    module, _, attr = path.rpartition('.')
    if not module:
        raise ImproperlyConfigured(
            'Error importing widget for BleachField %s: "not a dotted path"' % path)
    try:
        mod = import_module(module)
    except (ImportError, ValueError) as e:
        error_message = 'Error importing widget for BleachField %s: "%s"'
        raise ImproperlyConfigured(error_message % (path, e))
    try:
        cls = getattr(mod, attr)
    except AttributeError:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" widget'
                                   % (module, attr))
    return cls
Load custom widget for the form field
21,196
def get_default_widget():
    """Return the widget class for the form field.

    Uses BLEACH_DEFAULT_WIDGET from settings when defined, otherwise a
    plain Textarea.
    """
    if hasattr(settings, 'BLEACH_DEFAULT_WIDGET'):
        return load_widget(settings.BLEACH_DEFAULT_WIDGET)
    return forms.Textarea
Get the default widget or the widget defined in settings
21,197
def to_python(self, value):
    """Sanitize the submitted value, stripping disallowed HTML via bleach.

    Empty inputs return ``self.empty_value`` when the field defines one,
    otherwise an empty string.
    """
    if value in self.empty_values:
        try:
            return self.empty_value
        except AttributeError:
            return u''
    return bleach.clean(value, **self.bleach_options)
Strips any dodgy HTML tags from the input
21,198
def build_dynamic_field(self, group, field_meta):
    """Build a form-field description from JIRA's field metadata.

    Returns a dict of field kwargs, or None for field types that cannot be
    rendered (timetracking, worklog/attachment arrays).
    """
    schema = field_meta['schema']
    # Default to a plain text input unless the schema says otherwise.
    fieldtype = 'text'
    fkwargs = {
        'label': field_meta['name'],
        'required': field_meta['required'],
    }
    if (schema['type'] in ['securitylevel', 'priority']
            or schema.get('custom') == JIRA_CUSTOM_FIELD_TYPES['select']):
        fieldtype = 'select'
        fkwargs['choices'] = self.make_choices(field_meta.get('allowedValues'))
    elif field_meta.get('autoCompleteUrl') and (schema.get('items') == 'user'
                                                or schema['type'] == 'user'):
        # User pickers proxy JIRA's autocomplete endpoint through Sentry.
        fieldtype = 'select'
        sentry_url = '/api/0/issues/%s/plugins/%s/autocomplete' % (group.id, self.slug)
        fkwargs['url'] = '%s?jira_url=%s' % (sentry_url,
                                             quote_plus(field_meta['autoCompleteUrl']),)
        fkwargs['has_autocomplete'] = True
        fkwargs['placeholder'] = 'Start typing to search for a user'
    elif schema['type'] in ['timetracking']:
        # Unsupported field type: skip.
        return None
    elif schema.get('items') in ['worklog', 'attachment']:
        # Unsupported field type: skip.
        return None
    elif schema['type'] == 'array' and schema['items'] != 'string':
        fieldtype = 'select'
        fkwargs.update({
            'multiple': True,
            'choices': self.make_choices(field_meta.get('allowedValues')),
            'default': []
        })
    # Custom textarea fields override the inferred type.
    if schema.get('custom'):
        if schema['custom'] == JIRA_CUSTOM_FIELD_TYPES['textarea']:
            fieldtype = 'textarea'
    fkwargs['type'] = fieldtype
    return fkwargs
Builds a field based on JIRA's meta field information.
21,199
def create_issue(self, request, group, form_data, **kwargs):
    """Create a work item on the remote service.

    Returns a dict with the created item's id, browse URL and title.
    """
    instance = self.get_option('instance', group.project)
    project = (form_data.get('project')
               or self.get_option('default_project', group.project))
    client = self.get_client(request.user)
    title = form_data['title']
    description = form_data['description']
    link = absolute_uri(group.get_absolute_url(params={'referrer': 'vsts_plugin'}))
    try:
        created_item = client.create_work_item(
            instance=instance,
            project=project,
            title=title,
            # The description is rendered to HTML for the work-item comment.
            comment=markdown(description),
            link=link,
        )
    except Exception as e:
        # NOTE(review): raise_error is presumed to re-raise; otherwise
        # created_item below would be unbound — confirm in plugin base class.
        self.raise_error(e, identity=client.auth)
    return {
        'id': created_item['id'],
        'url': created_item['_links']['html']['href'],
        'title': title,
    }
Creates the issue on the remote service and returns an issue ID .