Columns: idx (int64, 0 – 251k), question (string, length 53 – 3.53k), target (string, length 5 – 1.23k), len_question (int64, 20 – 893), len_target (int64, 3 – 238). Each row below lists idx, question (code), target (docstring), len_question, and len_target.
241,800
def create_agent_signer(user_id):
    sock = connect_to_agent(env=os.environ)
    keygrip = get_keygrip(user_id)

    def sign(digest):
        """Sign the digest and return an ECDSA/RSA/DSA signature."""
        return sign_digest(sock=sock, keygrip=keygrip, digest=digest)
    return sign
Sign digest with existing GPG keys using gpg-agent tool.
91
14
241,801
def msg_name(code):
    ids = {v: k for k, v in COMMANDS.items()}
    return ids[code]
Convert integer message code into a string name.
34
10
241,802
def _legacy_pubs(buf):
    leftover = buf.read()
    if leftover:
        log.warning('skipping leftover: %r', leftover)
    code = util.pack('B', msg_code('SSH_AGENT_RSA_IDENTITIES_ANSWER'))
    num = util.pack('L', 0)  # no SSH v1 keys
    return util.frame(code, num)
SSH v1 public keys are not supported.
95
10
241,803
def handle(self, msg):
    debug_msg = ': {!r}'.format(msg) if self.debug else ''
    log.debug('request: %d bytes%s', len(msg), debug_msg)
    buf = io.BytesIO(msg)
    code, = util.recv(buf, '>B')
    if code not in self.methods:
        log.warning('Unsupported command: %s (%d)', msg_name(code), code)
        return failure()
    method = self.methods[code]
    log.debug('calling %s()', method.__name__)
    reply = method(buf=buf)
    debug_reply = ': {!r}'.format(reply) if self.debug else ''
    log.debug('reply: %d bytes%s', len(reply), debug_reply)
    return reply
Handle SSH message from the SSH client and return the response.
194
12
241,804
def list_pubs(self, buf):
    assert not buf.read()
    keys = self.conn.parse_public_keys()
    code = util.pack('B', msg_code('SSH2_AGENT_IDENTITIES_ANSWER'))
    num = util.pack('L', len(keys))
    log.debug('available keys: %s', [k['name'] for k in keys])
    for i, k in enumerate(keys):
        log.debug('%2d) %s', i + 1, k['fingerprint'])
    pubs = [util.frame(k['blob']) + util.frame(k['name']) for k in keys]
    return util.frame(code, num, *pubs)
SSH v2 public keys are serialized and returned.
181
12
241,805
def sign_message(self, buf):
    key = formats.parse_pubkey(util.read_frame(buf))
    log.debug('looking for %s', key['fingerprint'])
    blob = util.read_frame(buf)
    assert util.read_frame(buf) == b''
    assert not buf.read()
    for k in self.conn.parse_public_keys():
        if k['fingerprint'] == key['fingerprint']:
            log.debug('using key %r (%s)', k['name'], k['fingerprint'])
            key = k
            break
    else:
        raise KeyError('key not found')
    label = key['name'].decode('utf-8')
    log.debug('signing %d-byte blob with "%s" key', len(blob), label)
    try:
        signature = self.conn.sign(blob=blob, identity=key['identity'])
    except IOError:
        return failure()
    log.debug('signature: %r', signature)
    try:
        sig_bytes = key['verifier'](sig=signature, msg=blob)
        log.info('signature status: OK')
    except formats.ecdsa.BadSignatureError:
        log.exception('signature status: ERROR')
        raise ValueError('invalid ECDSA signature')
    log.debug('signature size: %d bytes', len(sig_bytes))
    data = util.frame(util.frame(key['type']), util.frame(sig_bytes))
    code = util.pack('B', msg_code('SSH2_AGENT_SIGN_RESPONSE'))
    return util.frame(code, data)
SSH v2 public key authentication is performed.
404
10
241,806
def recv(conn, size):
    try:
        fmt = size
        size = struct.calcsize(fmt)
    except TypeError:
        fmt = None
    try:
        _read = conn.recv
    except AttributeError:
        _read = conn.read
    res = io.BytesIO()
    while size > 0:
        buf = _read(size)
        if not buf:
            raise EOFError
        size = size - len(buf)
        res.write(buf)
    res = res.getvalue()
    if fmt:
        return struct.unpack(fmt, res)
    else:
        return res
Receive bytes from connection socket or stream.
119
9
241,807
def bytes2num(s):
    res = 0
    for i, c in enumerate(reversed(bytearray(s))):
        res += c << (i * 8)
    return res
Convert MSB-first bytes to an unsigned integer.
42
12
241,808
def num2bytes(value, size):
    res = []
    for _ in range(size):
        res.append(value & 0xFF)
        value = value >> 8
    assert value == 0
    return bytes(bytearray(list(reversed(res))))
Convert an unsigned integer to MSB-first bytes with specified size.
58
15
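A quick usage sketch for the two byte-order helpers above, assuming bytes2num() and num2bytes() as defined in these rows:

    # Round-trip between MSB-first (big-endian) bytes and unsigned integers.
    assert bytes2num(b'\x01\x00') == 256
    assert num2bytes(256, size=2) == b'\x01\x00'
    assert bytes2num(num2bytes(0xDEADBEEF, size=4)) == 0xDEADBEEF
    # num2bytes() asserts that the value fits into `size` bytes, so
    # num2bytes(256, size=1) raises AssertionError.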
241,809
def frame(*msgs):
    res = io.BytesIO()
    for msg in msgs:
        res.write(msg)
    msg = res.getvalue()
    return pack('L', len(msg)) + msg
Serialize MSB-first length-prefixed frame.
51
12
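A self-contained sketch of how frame() behaves; the pack() helper is an assumption here (a big-endian struct.pack wrapper, so 'L' means '>L'), and the reimplementation is for illustration only:

    import io
    import struct

    def pack(fmt, *args):
        # Assumed helper: big-endian struct.pack wrapper ('L' -> '>L').
        return struct.pack('>' + fmt, *args)

    def frame(*msgs):
        res = io.BytesIO()
        for msg in msgs:
            res.write(msg)
        msg = res.getvalue()
        return pack('L', len(msg)) + msg

    # 4-byte big-endian length prefix, then the concatenated payload:
    assert frame(b'ab', b'cd') == b'\x00\x00\x00\x04abcd'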
241,810
def split_bits(value, *bits):
    result = []
    for b in reversed(bits):
        mask = (1 << b) - 1
        result.append(value & mask)
        value = value >> b
    assert value == 0
    result.reverse()
    return result
Split integer value into list of ints according to bits list.
56
13
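Usage sketch, assuming split_bits() as defined above:

    # Fields come back MSB-first, in the same order as the bit widths.
    assert split_bits(0xAB, 4, 4) == [0xA, 0xB]
    assert split_bits(0b110011, 3, 2, 1) == [0b110, 0b01, 0b1]
    # The internal assert guarantees the widths consume the whole value:
    # split_bits(0x1FF, 4, 4) raises AssertionError.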
241,811
def readfmt(stream, fmt):
    size = struct.calcsize(fmt)
    blob = stream.read(size)
    return struct.unpack(fmt, blob)
Read and unpack an object from stream using a struct format string.
38
14
241,812
def setup_logging(verbosity, filename=None):
    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
    level = levels[min(verbosity, len(levels) - 1)]
    logging.root.setLevel(level)
    fmt = logging.Formatter('%(asctime)s %(levelname)-12s %(message)-100s '
                            '[%(filename)s:%(lineno)d]')
    hdlr = logging.StreamHandler()  # stderr
    hdlr.setFormatter(fmt)
    logging.root.addHandler(hdlr)
    if filename:
        hdlr = logging.FileHandler(filename, 'a')
        hdlr.setFormatter(fmt)
        logging.root.addHandler(hdlr)
Configure logging for this tool.
173
7
241,813
def which(cmd):
    try:
        # For Python 3
        from shutil import which as _which
    except ImportError:
        # For Python 2
        from backports.shutil_which import which as _which  # pylint: disable=relative-import
    full_path = _which(cmd)
    if full_path is None:
        raise OSError('Cannot find {!r} in $PATH'.format(cmd))
    log.debug('which %r => %r', cmd, full_path)
    return full_path
Return full path to specified command or raise OSError if missing.
114
15
241,814
def readfmt(self, fmt):
    size = struct.calcsize(fmt)
    blob = self.read(size)
    obj, = struct.unpack(fmt, blob)
    return obj
Read a specified object using a struct format string.
42
10
241,815
def read(self, size=None):
    blob = self.s.read(size)
    if size is not None and len(blob) < size:
        raise EOFError
    if self._captured:
        self._captured.write(blob)
    return blob
Read size bytes from stream.
56
6
241,816
def get(self):
    if self.timer() > self.deadline:
        self.value = None
    return self.value
Returns existing value, or None if the deadline has expired.
26
10
241,817
def set(self, value):
    self.deadline = self.timer() + self.duration
    self.value = value
Set new value and reset the deadline for expiration.
26
10
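For context, here is a minimal sketch of the expiring-cache class that the get()/set() methods above plausibly belong to; the constructor signature and the timer/duration attributes are assumptions inferred from the two method bodies:

    import time

    class ExpiringCache(object):
        # Hypothetical reconstruction: caches one value for a limited time.
        def __init__(self, seconds, timer=time.time):
            self.duration = seconds  # validity window for a cached value
            self.timer = timer       # injectable clock (eases testing)
            self.deadline = 0
            self.value = None

        def get(self):
            if self.timer() > self.deadline:
                self.value = None
            return self.value

        def set(self, value):
            self.deadline = self.timer() + self.duration
            self.value = value

    cache = ExpiringCache(seconds=60)
    cache.set('cached passphrase')
    assert cache.get() == 'cached passphrase'  # until 60 seconds elapse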
241,818
def sig_encode(r, s):
    r = util.assuan_serialize(util.num2bytes(r, 32))
    s = util.assuan_serialize(util.num2bytes(s, 32))
    return b'(7:sig-val(5:ecdsa(1:r32:' + r + b')(1:s32:' + s + b')))'
Serialize ECDSA signature data into GPG S-expression.
93
14
241,819
def parse_ecdh(line):
    prefix, line = line.split(b' ', 1)
    assert prefix == b'D'
    exp, leftover = keyring.parse(keyring.unescape(line))
    log.debug('ECDH s-exp: %r', exp)
    assert not leftover
    label, exp = exp
    assert label == b'enc-val'
    assert exp[0] == b'ecdh'
    items = exp[1:]
    log.debug('ECDH parameters: %r', items)
    return dict(items)[b'e']
Parse ECDH request and return remote public key.
126
12
241,820
def handle_getinfo(self, conn, args):
    result = None
    if args[0] == b'version':
        result = self.version
    elif args[0] == b's2k_count':
        # Use highest number of S2K iterations.
        # https://www.gnupg.org/documentation/manuals/gnupg/OpenPGP-Options.html
        # https://tools.ietf.org/html/rfc4880#section-3.7.1.3
        result = '{}'.format(64 << 20).encode('ascii')
    else:
        log.warning('Unknown GETINFO command: %s', args)
    if result:
        keyring.sendline(conn, b'D ' + result)
Handle some of the GETINFO messages.
171
8
241,821
def handle_scd(self, conn, args):
    reply = {
        (b'GETINFO', b'version'): self.version,
    }.get(args)
    if reply is None:
        raise AgentError(b'ERR 100696144 No such device <SCD>')
    keyring.sendline(conn, b'D ' + reply)
No support for smart-card device protocol.
80
9
241,822
def get_identity(self, keygrip):
    keygrip_bytes = binascii.unhexlify(keygrip)
    pubkey_dict, user_ids = decode.load_by_keygrip(
        pubkey_bytes=self.pubkey_bytes, keygrip=keygrip_bytes)
    # We assume the first user ID is used to generate TREZOR-based GPG keys.
    user_id = user_ids[0]['value'].decode('utf-8')
    curve_name = protocol.get_curve_name_by_oid(pubkey_dict['curve_oid'])
    ecdh = (pubkey_dict['algo'] == protocol.ECDH_ALGO_ID)
    identity = client.create_identity(user_id=user_id, curve_name=curve_name)
    verifying_key = self.client.pubkey(identity=identity, ecdh=ecdh)
    pubkey = protocol.PublicKey(
        curve_name=curve_name, created=pubkey_dict['created'],
        verifying_key=verifying_key, ecdh=ecdh)
    assert pubkey.key_id() == pubkey_dict['key_id']
    assert pubkey.keygrip() == keygrip_bytes
    return identity
Returns device.interface.Identity that matches specified keygrip.
300
13
241,823
def pksign(self, conn):
    log.debug('signing %r digest (algo #%s)', self.digest, self.algo)
    identity = self.get_identity(keygrip=self.keygrip)
    r, s = self.client.sign(identity=identity,
                            digest=binascii.unhexlify(self.digest))
    result = sig_encode(r, s)
    log.debug('result: %r', result)
    keyring.sendline(conn, b'D ' + result)
Sign a message digest using a private EC key.
126
10
241,824
def pkdecrypt(self, conn):
    for msg in [b'S INQUIRE_MAXLEN 4096', b'INQUIRE CIPHERTEXT']:
        keyring.sendline(conn, msg)
    line = keyring.recvline(conn)
    assert keyring.recvline(conn) == b'END'
    remote_pubkey = parse_ecdh(line)
    identity = self.get_identity(keygrip=self.keygrip)
    ec_point = self.client.ecdh(identity=identity, pubkey=remote_pubkey)
    keyring.sendline(conn, b'D ' + _serialize_point(ec_point))
Handle decryption using ECDH.
152
8
241,825
def have_key(self, *keygrips):
    for keygrip in keygrips:
        try:
            self.get_identity(keygrip=keygrip)
            break
        except KeyError as e:
            log.warning('HAVEKEY(%s) failed: %s', keygrip, e)
    else:
        raise AgentError(b'ERR 67108881 No secret key <GPG Agent>')
Check if any keygrip corresponds to a TREZOR-based key.
96
16
241,826
def set_hash(self, algo, digest):
    self.algo = algo
    self.digest = digest
Set algorithm ID and hexadecimal digest for next operation.
25
13
241,827
def handle(self, conn):
    keyring.sendline(conn, b'OK')
    for line in keyring.iterlines(conn):
        parts = line.split(b' ')
        command = parts[0]
        args = tuple(parts[1:])
        if command == b'BYE':
            return
        elif command == b'KILLAGENT':
            keyring.sendline(conn, b'OK')
            raise AgentStop()
        if command not in self.handlers:
            log.error('unknown request: %r', line)
            continue
        handler = self.handlers[command]
        if handler:
            try:
                handler(conn, args)
            except AgentError as e:
                msg, = e.args
                keyring.sendline(conn, msg)
                continue
        keyring.sendline(conn, b'OK')
Handle connection from GPG binary using the ASSUAN protocol.
177
13
241,828
def connect(self):
    log.critical('NEVER USE THIS CODE FOR REAL-LIFE USE-CASES!!!')
    log.critical('ONLY FOR DEBUGGING AND TESTING!!!')
    # The code below uses HARD-CODED secret key - and should be used ONLY
    # for GnuPG integration tests (e.g. when no real device is available).
    # pylint: disable=attribute-defined-outside-init
    self.secexp = 1
    self.sk = ecdsa.SigningKey.from_secret_exponent(
        secexp=self.secexp, curve=ecdsa.curves.NIST256p,
        hashfunc=hashlib.sha256)
    self.vk = self.sk.get_verifying_key()
    return self
Return dummy connection.
176
4
241,829
def create_identity(user_id, curve_name):
    result = interface.Identity(identity_str='gpg://', curve_name=curve_name)
    result.identity_dict['host'] = user_id
    return result
Create GPG identity for hardware device.
55
8
241,830
def pubkey(self, identity, ecdh=False):
    with self.device:
        pubkey = self.device.pubkey(ecdh=ecdh, identity=identity)
    return formats.decompress_pubkey(pubkey=pubkey,
                                     curve_name=identity.curve_name)
Return public key as VerifyingKey object.
64
9
241,831
def sign(self, identity, digest):
    log.info('please confirm GPG signature on %s for "%s"...',
             self.device, identity.to_string())
    if identity.curve_name == formats.CURVE_NIST256:
        digest = digest[:32]  # sign the first 256 bits
    log.debug('signing digest: %s', util.hexlify(digest))
    with self.device:
        sig = self.device.sign(blob=digest, identity=identity)
    return (util.bytes2num(sig[:32]), util.bytes2num(sig[32:]))
Sign the digest and return a serialized signature.
142
10
241,832
def ecdh(self, identity, pubkey):
    log.info('please confirm GPG decryption on %s for "%s"...',
             self.device, identity.to_string())
    with self.device:
        return self.device.ecdh(pubkey=pubkey, identity=identity)
Derive shared secret using ECDH from remote public key.
67
13
241,833
def connect(self):
    transport = self._defs.find_device()
    if not transport:
        raise interface.NotFoundError('{} not connected'.format(self))
    log.debug('using transport: %s', transport)
    for _ in range(5):  # Retry a few times in case of PIN failures
        connection = self._defs.Client(transport=transport,
                                       ui=self.ui,
                                       state=self.__class__.cached_state)
        self._verify_version(connection)
        try:
            connection.ping(msg='', pin_protection=True)  # unlock PIN
            return connection
        except (self._defs.PinException, ValueError) as e:
            log.error('Invalid PIN: %s, retrying...', e)
            continue
        except Exception as e:
            log.exception('ping failed: %s', e)
            connection.close()  # so the next HID open() will succeed
            raise
Enumerate and connect to the first available interface.
211
11
241,834
def string_to_identity(identity_str):
    m = _identity_regexp.match(identity_str)
    result = m.groupdict()
    log.debug('parsed identity: %s', result)
    return {k: v for k, v in result.items() if v}
Parse string into Identity dictionary.
71
7
241,835
def identity_to_string(identity_dict):
    result = []
    if identity_dict.get('proto'):
        result.append(identity_dict['proto'] + '://')
    if identity_dict.get('user'):
        result.append(identity_dict['user'] + '@')
    result.append(identity_dict['host'])
    if identity_dict.get('port'):
        result.append(':' + identity_dict['port'])
    if identity_dict.get('path'):
        result.append(identity_dict['path'])
    log.debug('identity parts: %s', result)
    return ''.join(result)
Dump Identity dictionary into its string representation.
164
9
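Usage sketch for identity_to_string() above (assuming a module-level log from the logging library is configured; the sample values are hypothetical):

    identity = {'proto': 'ssh', 'user': 'roman', 'host': 'example.com',
                'port': '2222', 'path': '/sessions'}
    assert identity_to_string(identity) == 'ssh://roman@example.com:2222/sessions'
    # Optional parts are simply skipped:
    assert identity_to_string({'host': 'example.com'}) == 'example.com'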
241,836
def items(self):
    return [(k, unidecode.unidecode(v))
            for k, v in self.identity_dict.items()]
Return a copy of identity_dict items.
37
9
241,837
def to_bytes(self):
    s = identity_to_string(self.identity_dict)
    return unidecode.unidecode(s).encode('ascii')
Transliterate Unicode into ASCII.
42
7
241,838
def get_curve_name(self, ecdh=False):
    if ecdh:
        return formats.get_ecdh_curve_name(self.curve_name)
    else:
        return self.curve_name
Return correct curve name for device operations.
47
8
241,839
def serve(handler, sock_path, timeout=UNIX_SOCKET_TIMEOUT):
    ssh_version = subprocess.check_output(['ssh', '-V'],
                                          stderr=subprocess.STDOUT)
    log.debug('local SSH version: %r', ssh_version)
    environ = {'SSH_AUTH_SOCK': sock_path, 'SSH_AGENT_PID': str(os.getpid())}
    device_mutex = threading.Lock()
    with server.unix_domain_socket_server(sock_path) as sock:
        sock.settimeout(timeout)
        quit_event = threading.Event()
        handle_conn = functools.partial(server.handle_connection,
                                        handler=handler,
                                        mutex=device_mutex)
        kwargs = dict(sock=sock, handle_conn=handle_conn,
                      quit_event=quit_event)
        with server.spawn(server.server_thread, kwargs):
            try:
                yield environ
            finally:
                log.debug('closing server')
                quit_event.set()
Start the ssh-agent server on a UNIX-domain socket.
252
14
241,840
def run_server(conn, command, sock_path, debug, timeout):
    ret = 0
    try:
        handler = protocol.Handler(conn=conn, debug=debug)
        with serve(handler=handler, sock_path=sock_path,
                   timeout=timeout) as env:
            if command:
                ret = server.run_process(command=command, environ=env)
            else:
                signal.pause()  # wait for signal (e.g. SIGINT)
    except KeyboardInterrupt:
        log.info('server stopped')
    return ret
Common code for run_agent and run_git below.
114
12
241,841
def handle_connection_error(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except device.interface.NotFoundError as e:
            log.error('Connection error (try unplugging and replugging your device): %s', e)
            return 1
    return wrapper
Decorator: fail with a non-zero exit code on device connection errors.
86
8
241,842
def parse_config(contents):
    for identity_str, curve_name in re.findall(r'\<(.*?)\|(.*?)\>', contents):
        yield device.interface.Identity(identity_str=identity_str,
                                        curve_name=curve_name)
Parse config file into a list of Identity objects.
63
11
241,843
def main(device_type):
    args = create_agent_parser(device_type=device_type).parse_args()
    util.setup_logging(verbosity=args.verbose, filename=args.log_file)

    public_keys = None
    filename = None
    if args.identity.startswith('/'):
        filename = args.identity
        contents = open(filename, 'rb').read().decode('utf-8')
        # Allow loading previously exported SSH public keys
        if filename.endswith('.pub'):
            public_keys = list(import_public_keys(contents))
        identities = list(parse_config(contents))
    else:
        identities = [device.interface.Identity(
            identity_str=args.identity, curve_name=args.ecdsa_curve_name)]
    for index, identity in enumerate(identities):
        identity.identity_dict['proto'] = u'ssh'
        log.info('identity #%d: %s', index, identity.to_string())

    # override default PIN/passphrase entry tools (relevant for TREZOR/Keepkey):
    device_type.ui = device.ui.UI(device_type=device_type, config=vars(args))
    device_type.ui.cached_passphrase_ack = util.ExpiringCache(
        args.cache_expiry_seconds)

    conn = JustInTimeConnection(
        conn_factory=lambda: client.Client(device_type()),
        identities=identities, public_keys=public_keys)

    sock_path = _get_sock_path(args)
    command = args.command
    context = _dummy_context()
    if args.connect:
        command = ['ssh'] + ssh_args(conn) + args.command
    elif args.mosh:
        command = ['mosh'] + mosh_args(conn) + args.command
    elif args.daemonize:
        out = 'SSH_AUTH_SOCK={0}; export SSH_AUTH_SOCK;\n'.format(sock_path)
        sys.stdout.write(out)
        sys.stdout.flush()
        context = daemon.DaemonContext()
        log.info('running the agent as a daemon on %s', sock_path)
    elif args.foreground:
        log.info('running the agent on %s', sock_path)

    use_shell = bool(args.shell)
    if use_shell:
        command = os.environ['SHELL']
        sys.stdin.close()

    if command or args.daemonize or args.foreground:
        with context:
            return run_server(conn=conn, command=command,
                              sock_path=sock_path,
                              debug=args.debug, timeout=args.timeout)
    else:
        for pk in conn.public_keys():
            sys.stdout.write(pk)
        return 0
Run ssh-agent using given hardware client factory.
647
10
241,844
def parse_public_keys(self):
    public_keys = [formats.import_public_key(pk)
                   for pk in self.public_keys()]
    for pk, identity in zip(public_keys, self.identities):
        pk['identity'] = identity
    return public_keys
Parse SSH public keys into dictionaries.
69
9
241,845
def public_keys_as_files(self):
    if not self.public_keys_tempfiles:
        for pk in self.public_keys():
            f = tempfile.NamedTemporaryFile(prefix='trezor-ssh-pubkey-',
                                            mode='w')
            f.write(pk)
            f.flush()
            self.public_keys_tempfiles.append(f)
    return self.public_keys_tempfiles
Store public keys as temporary SSH identity files.
98
9
241,846
def sign(self, blob, identity):
    conn = self.conn_factory()
    return conn.sign_ssh_challenge(blob=blob, identity=identity)
Sign a given blob using the specified identity on the device.
38
12
241,847
def packet(tag, blob):
    assert len(blob) < 2**32
    if len(blob) < 2**8:
        length_type = 0
    elif len(blob) < 2**16:
        length_type = 1
    else:
        length_type = 2
    fmt = ['>B', '>H', '>L'][length_type]
    leading_byte = 0x80 | (tag << 2) | (length_type)
    return struct.pack('>B', leading_byte) + util.prefix_len(fmt, blob)
Create small GPG packet.
122
6
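A self-contained sketch of the old-format OpenPGP packet header that packet() builds; the prefix_len() helper here is an assumption (a struct.pack-based length prefixer standing in for util.prefix_len):

    import struct

    def prefix_len(fmt, blob):
        # Assumed helper: length-prefix `blob` using the given struct format.
        return struct.pack(fmt, len(blob)) + blob

    def packet(tag, blob):
        assert len(blob) < 2**32
        if len(blob) < 2**8:
            length_type = 0  # 1-byte length
        elif len(blob) < 2**16:
            length_type = 1  # 2-byte length
        else:
            length_type = 2  # 4-byte length
        fmt = ['>B', '>H', '>L'][length_type]
        leading_byte = 0x80 | (tag << 2) | length_type
        return struct.pack('>B', leading_byte) + prefix_len(fmt, blob)

    # Bit 7 set, tag in bits 5..2, length-type in bits 1..0: tag 2
    # (signature) with a 3-byte body gives 0x88, then a 1-byte length.
    assert packet(tag=2, blob=b'abc') == b'\x88\x03abc'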
241,848
def subpacket(subpacket_type, fmt, *values):
    blob = struct.pack(fmt, *values) if values else fmt
    return struct.pack('>B', subpacket_type) + blob
Create GPG subpacket.
50
7
241,849
def subpacket_prefix_len(item):
    n = len(item)
    if n >= 8384:
        prefix = b'\xFF' + struct.pack('>L', n)
    elif n >= 192:
        n = n - 192
        prefix = struct.pack('BB', (n // 256) + 192, n % 256)
    else:
        prefix = struct.pack('B', n)
    return prefix + item
Prefix subpacket length according to RFC 4880, section 5.2.3.1.
95
21
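The three RFC 4880 subpacket length encodings, assuming subpacket_prefix_len() as defined above:

    assert subpacket_prefix_len(b'\x00' * 10)[:1] == b'\x0a'       # n < 192: 1 octet
    assert subpacket_prefix_len(b'\x00' * 200)[:2] == b'\xc0\x08'  # 192 <= n < 8384: 2 octets
    assert subpacket_prefix_len(b'\x00' * 9000)[:5] == b'\xff\x00\x00\x23\x28'  # n >= 8384: 5 octets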
241,850
def subpackets(*items):
    prefixed = [subpacket_prefix_len(item) for item in items]
    return util.prefix_len('>H', b''.join(prefixed))
Serialize several GPG subpackets.
49
9
241,851
def mpi(value):
    bits = value.bit_length()
    data_size = (bits + 7) // 8
    data_bytes = bytearray(data_size)
    for i in range(data_size):
        data_bytes[i] = value & 0xFF
        value = value >> 8
    data_bytes.reverse()
    return struct.pack('>H', bits) + bytes(data_bytes)
Serialize multiprecision integer using GPG format.
93
11
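Usage sketch, assuming mpi() as defined above: an OpenPGP multiprecision integer is a 2-byte big-endian bit count followed by the minimal big-endian magnitude:

    assert mpi(1) == b'\x00\x01\x01'        # 1 bit -> 1 data byte
    assert mpi(511) == b'\x00\x09\x01\xff'  # 9 bits -> 2 data bytes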
241,852
def keygrip_nist256(vk):
    curve = vk.curve.curve
    gen = vk.curve.generator
    g = (4 << 512) | (gen.x() << 256) | gen.y()
    point = vk.pubkey.point
    q = (4 << 512) | (point.x() << 256) | point.y()
    return _compute_keygrip([
        ['p', util.num2bytes(curve.p(), size=32)],
        ['a', util.num2bytes(curve.a() % curve.p(), size=32)],
        ['b', util.num2bytes(curve.b() % curve.p(), size=32)],
        ['g', util.num2bytes(g, size=65)],
        ['n', util.num2bytes(vk.curve.order, size=32)],
        ['q', util.num2bytes(q, size=65)],
    ])
Compute keygrip for NIST256 curve public keys.
239
13
241,853
def keygrip_ed25519(vk):
    # pylint: disable=line-too-long
    return _compute_keygrip([
        ['p', util.num2bytes(0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED, size=32)],  # nopep8
        ['a', b'\x01'],
        ['b', util.num2bytes(0x2DFC9311D490018C7338BF8688861767FF8FF5B2BEBE27548A14B235ECA6874A, size=32)],  # nopep8
        ['g', util.num2bytes(0x04216936D3CD6E53FEC0A4E231FDD6DC5C692CC7609525A7B2C9562D608F25D51A6666666666666666666666666666666666666666666666666666666666666658, size=65)],  # nopep8
        ['n', util.num2bytes(0x1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED, size=32)],  # nopep8
        ['q', vk.to_bytes()],
    ])
Compute keygrip for Ed25519 public keys.
294
12
241,854
def keygrip_curve25519(vk):
    # pylint: disable=line-too-long
    return _compute_keygrip([
        ['p', util.num2bytes(0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED, size=32)],  # nopep8
        ['a', b'\x01\xDB\x41'],
        ['b', b'\x01'],
        ['g', util.num2bytes(0x04000000000000000000000000000000000000000000000000000000000000000920ae19a1b8a086b4e01edd2c7748d14c923d4d7e6d7c61b229e9c5a27eced3d9, size=65)],  # nopep8
        ['n', util.num2bytes(0x1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED, size=32)],  # nopep8
        ['q', vk.to_bytes()],
    ])
Compute keygrip for Curve25519 public keys.
254
12
241,855
def get_curve_name_by_oid(oid):
    for curve_name, info in SUPPORTED_CURVES.items():
        if info['oid'] == oid:
            return curve_name
    raise KeyError('Unknown OID: {!r}'.format(oid))
Return curve name matching specified OID or raise KeyError.
71
12
241,856
def make_signature(signer_func, data_to_sign, public_algo,
                   hashed_subpackets, unhashed_subpackets, sig_type=0):
    # pylint: disable=too-many-arguments
    header = struct.pack('>BBBB',
                         4,            # version
                         sig_type,     # rfc4880 (section-5.2.1)
                         public_algo,
                         8)            # hash_alg (SHA256)
    hashed = subpackets(*hashed_subpackets)
    unhashed = subpackets(*unhashed_subpackets)
    tail = b'\x04\xff' + struct.pack('>L', len(header) + len(hashed))
    data_to_hash = data_to_sign + header + hashed + tail
    log.debug('hashing %d bytes', len(data_to_hash))
    digest = hashlib.sha256(data_to_hash).digest()
    log.debug('signing digest: %s', util.hexlify(digest))
    params = signer_func(digest=digest)
    sig = b''.join(mpi(p) for p in params)
    return bytes(header + hashed + unhashed +
                 digest[:2] +  # used for decoder's sanity check
                 sig)
Create new GPG signature.
301
6
241,857
def data(self):
    header = struct.pack('>BLB',
                         4,             # version
                         self.created,  # creation
                         self.algo_id)  # public key algorithm ID
    oid = util.prefix_len('>B', self.curve_info['oid'])
    blob = self.curve_info['serialize'](self.verifying_key)
    return header + oid + blob + self.ecdh_packet
Data for packet creation.
100
5
241,858
def create_subkey(primary_bytes, subkey, signer_func, secret_bytes=b''):
    subkey_packet = protocol.packet(tag=(7 if secret_bytes else 14),
                                    blob=(subkey.data() + secret_bytes))
    packets = list(decode.parse_packets(io.BytesIO(primary_bytes)))
    primary, user_id, signature = packets[:3]
    data_to_sign = primary['_to_hash'] + subkey.data_to_hash()

    if subkey.ecdh:
        embedded_sig = None
    else:
        # Primary Key Binding Signature
        hashed_subpackets = [
            protocol.subpacket_time(subkey.created)]  # signature time
        unhashed_subpackets = [
            protocol.subpacket(16, subkey.key_id())]  # issuer key id
        embedded_sig = protocol.make_signature(
            signer_func=signer_func,
            data_to_sign=data_to_sign,
            public_algo=subkey.algo_id,
            sig_type=0x19,
            hashed_subpackets=hashed_subpackets,
            unhashed_subpackets=unhashed_subpackets)

    # Subkey Binding Signature
    # Key flags: https://tools.ietf.org/html/rfc4880#section-5.2.3.21
    # (certify & sign) (encrypt)
    flags = (2) if (not subkey.ecdh) else (4 | 8)
    hashed_subpackets = [
        protocol.subpacket_time(subkey.created),  # signature time
        protocol.subpacket_byte(0x1B, flags)]
    unhashed_subpackets = []
    unhashed_subpackets.append(protocol.subpacket(16, primary['key_id']))
    if embedded_sig is not None:
        unhashed_subpackets.append(protocol.subpacket(32, embedded_sig))
    unhashed_subpackets.append(protocol.CUSTOM_SUBPACKET)

    if not decode.has_custom_subpacket(signature):
        signer_func = keyring.create_agent_signer(user_id['value'])

    signature = protocol.make_signature(
        signer_func=signer_func,
        data_to_sign=data_to_sign,
        public_algo=primary['algo'],
        sig_type=0x18,
        hashed_subpackets=hashed_subpackets,
        unhashed_subpackets=unhashed_subpackets)
    sign_packet = protocol.packet(tag=2, blob=signature)
    return primary_bytes + subkey_packet + sign_packet
Export new subkey to GPG primary key.
629
10
241,859
def verify_gpg_version():
    existing_gpg = keyring.gpg_version().decode('ascii')
    required_gpg = '>=2.1.11'
    msg = 'Existing GnuPG has version "{}" ({} required)'.format(
        existing_gpg, required_gpg)
    if not semver.match(existing_gpg, required_gpg):
        log.error(msg)
Make sure that the installed GnuPG is not too old.
100
13
241,860
def check_output(args):
    log.debug('run: %s', args)
    out = subprocess.check_output(args=args).decode('utf-8')
    log.debug('out: %r', out)
    return out
Runs command and returns the output as string.
58
10
241,861
def check_call(args, stdin=None, env=None):
    log.debug('run: %s%s', args, ' {}'.format(env) if env else '')
    subprocess.check_call(args=args, stdin=stdin, env=env)
Runs command and verifies its success.
66
9
241,862
def write_file(path, data):
    with open(path, 'w') as f:
        log.debug('setting %s contents:\n%s', path, data)
        f.write(data)
    return f
Writes data to specified path.
49
7
241,863
def run_agent(device_type):
    p = argparse.ArgumentParser()
    p.add_argument('--homedir', default=os.environ.get('GNUPGHOME'))
    p.add_argument('-v', '--verbose', default=0, action='count')
    p.add_argument('--server', default=False, action='store_true',
                   help='Use stdin/stdout for communication with GPG.')
    p.add_argument('--pin-entry-binary', type=str, default='pinentry',
                   help='Path to PIN entry UI helper.')
    p.add_argument('--passphrase-entry-binary', type=str, default='pinentry',
                   help='Path to passphrase entry UI helper.')
    p.add_argument('--cache-expiry-seconds', type=float, default=float('inf'),
                   help='Expire passphrase from cache after this duration.')
    args, _ = p.parse_known_args()
    assert args.homedir

    log_file = os.path.join(args.homedir, 'gpg-agent.log')
    util.setup_logging(verbosity=args.verbose, filename=log_file)

    log.debug('sys.argv: %s', sys.argv)
    log.debug('os.environ: %s', os.environ)
    log.debug('pid: %d, parent pid: %d', os.getpid(), os.getppid())
    try:
        env = {'GNUPGHOME': args.homedir, 'PATH': os.environ['PATH']}
        pubkey_bytes = keyring.export_public_keys(env=env)
        device_type.ui = device.ui.UI(device_type=device_type,
                                      config=vars(args))
        device_type.ui.cached_passphrase_ack = util.ExpiringCache(
            seconds=float(args.cache_expiry_seconds))

        handler = agent.Handler(device=device_type(),
                                pubkey_bytes=pubkey_bytes)

        sock_server = _server_from_assuan_fd(os.environ)
        if sock_server is None:
            sock_server = _server_from_sock_path(env)

        with sock_server as sock:
            for conn in agent.yield_connections(sock):
                with contextlib.closing(conn):
                    try:
                        handler.handle(conn)
                    except agent.AgentStop:
                        log.info('stopping gpg-agent')
                        return
                    except IOError as e:
                        log.info('connection closed: %s', e)
                        return
                    except Exception as e:  # pylint: disable=broad-except
                        log.exception('handler failed: %s', e)
    except Exception as e:  # pylint: disable=broad-except
        log.exception('gpg-agent failed: %s', e)
Run a simple GPG-agent server.
692
9
241,864
def find_device():
    try:
        return get_transport(os.environ.get("TREZOR_PATH"))
    except Exception as e:  # pylint: disable=broad-except
        log.debug("Failed to find a Trezor device: %s", e)
Selects a transport based on the TREZOR_PATH environment variable.
68
14
241,865
def _convert_public_key(ecdsa_curve_name, result):
    if ecdsa_curve_name == 'nist256p1':
        if (result[64] & 1) != 0:
            result = bytearray([0x03]) + result[1:33]
        else:
            result = bytearray([0x02]) + result[1:33]
    else:
        result = result[1:]
        keyX = bytearray(result[0:32])
        keyY = bytearray(result[32:][::-1])
        if (keyX[31] & 1) != 0:
            keyY[31] |= 0x80
        result = b'\x00' + bytes(keyY)
    return bytes(result)
Convert Ledger reply into PublicKey object.
179
10
241,866
def connect(self):
    try:
        return comm.getDongle()
    except comm.CommException as e:
        raise interface.NotFoundError(
            '{} not connected: "{}"'.format(self, e))
Enumerate and connect to the first USB HID interface.
49
13
241,867
def pubkey(self, identity, ecdh=False):
    curve_name = identity.get_curve_name(ecdh)
    path = _expand_path(identity.get_bip32_address(ecdh))
    if curve_name == 'nist256p1':
        p2 = '01'
    else:
        p2 = '02'
    apdu = '800200' + p2
    apdu = binascii.unhexlify(apdu)
    apdu += bytearray([len(path) + 1, len(path) // 4])
    apdu += path
    log.debug('apdu: %r', apdu)
    result = bytearray(self.conn.exchange(bytes(apdu)))
    log.debug('result: %r', result)
    return _convert_public_key(curve_name, result[1:])
Get PublicKey object for specified BIP32 address and elliptic curve.
200
15
241,868
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15):
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen
    tgz_name = "distribute-%s.tar.gz" % version
    url = download_base + tgz_name
    saveto = os.path.join(to_dir, tgz_name)
    src = dst = None
    if not os.path.exists(saveto):  # Avoid repeated downloads
        try:
            log.warn("Downloading %s", url)
            src = urlopen(url)
            # Read/write all in one block, so we don't create a corrupt file
            # if the download is interrupted.
            data = src.read()
            dst = open(saveto, "wb")
            dst.write(data)
        finally:
            if src:
                src.close()
            if dst:
                dst.close()
    return os.path.realpath(saveto)
Download distribute from a specified location and return its filename.
249
10
241,869
def tokenize(stream, separator):
    for value in stream:
        for token in value.split(separator):
            if token:
                yield token.strip()
Tokenize and yield query parameter values.
35
8
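Usage sketch, assuming tokenize() as defined above: each value in the stream is split on the separator, and empty tokens are dropped:

    assert list(tokenize(['a,b', 'c', ',d,'], separator=',')) == ['a', 'b', 'c', 'd']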
241,870
def build_query(self, **filters):
    applicable_filters = []
    applicable_exclusions = []
    for param, value in filters.items():
        excluding_term = False
        param_parts = param.split("__")
        base_param = param_parts[0]  # only test against field without lookup
        negation_keyword = constants.DRF_HAYSTACK_NEGATION_KEYWORD
        if len(param_parts) > 1 and param_parts[1] == negation_keyword:
            excluding_term = True
            # haystack wouldn't understand our negation
            param = param.replace("__%s" % negation_keyword, "")

        if self.view.serializer_class:
            if hasattr(self.view.serializer_class.Meta, 'field_aliases'):
                old_base = base_param
                base_param = self.view.serializer_class.Meta.field_aliases.get(
                    base_param, base_param)
                param = param.replace(old_base, base_param)  # need to replace the alias

            fields = getattr(self.view.serializer_class.Meta, 'fields', [])
            exclude = getattr(self.view.serializer_class.Meta, 'exclude', [])
            search_fields = getattr(self.view.serializer_class.Meta,
                                    'search_fields', [])

            # Skip if the parameter is not listed in the serializer's `fields`
            # or if it's in the `exclude` list.
            if ((fields or search_fields) and
                    base_param not in chain(fields, search_fields)) or \
                    base_param in exclude or not value:
                continue

        field_queries = []
        if len(param_parts) > 1 and param_parts[-1] in ('in', 'range'):
            # `in` and `range` filters expects a list of values
            field_queries.append(self.view.query_object(
                (param, list(self.tokenize(value, self.view.lookup_sep)))))
        else:
            for token in self.tokenize(value, self.view.lookup_sep):
                field_queries.append(self.view.query_object((param, token)))

        field_queries = [fq for fq in field_queries if fq]
        if len(field_queries) > 0:
            term = six.moves.reduce(operator.or_, field_queries)
            if excluding_term:
                applicable_exclusions.append(term)
            else:
                applicable_filters.append(term)

    applicable_filters = six.moves.reduce(
        self.default_operator,
        filter(lambda x: x, applicable_filters)) if applicable_filters else []
    applicable_exclusions = six.moves.reduce(
        self.default_operator,
        filter(lambda x: x, applicable_exclusions)) if applicable_exclusions else []
    return applicable_filters, applicable_exclusions
Creates a single SQ filter from querystring parameters that correspond to the SearchIndex fields that have been registered in view.fields.
672
26
241,871
def build_query(self, **filters):
    field_facets = {}
    date_facets = {}
    query_facets = {}
    facet_serializer_cls = self.view.get_facet_serializer_class()

    if self.view.lookup_sep == ":":
        raise AttributeError(
            "The %(cls)s.lookup_sep attribute conflicts with the "
            "HaystackFacetFilter query parameter parser. Please choose "
            "another `lookup_sep` attribute for %(cls)s." %
            {"cls": self.view.__class__.__name__})

    fields = facet_serializer_cls.Meta.fields
    exclude = facet_serializer_cls.Meta.exclude
    field_options = facet_serializer_cls.Meta.field_options

    for field, options in filters.items():
        if field not in fields or field in exclude:
            continue
        field_options = merge_dict(field_options, {
            field: self.parse_field_options(self.view.lookup_sep, *options)})

    valid_gap = ("year", "month", "day", "hour", "minute", "second")
    for field, options in field_options.items():
        if any([k in options for k in
                ("start_date", "end_date", "gap_by", "gap_amount")]):
            if not all(k in options for k in
                       ("start_date", "end_date", "gap_by")):
                raise ValueError(
                    "Date faceting requires at least 'start_date', "
                    "'end_date' and 'gap_by' to be set.")
            if not options["gap_by"] in valid_gap:
                raise ValueError("The 'gap_by' parameter must be one of %s." %
                                 ", ".join(valid_gap))
            options.setdefault("gap_amount", 1)
            date_facets[field] = field_options[field]
        else:
            field_facets[field] = field_options[field]

    return {
        "date_facets": date_facets,
        "field_facets": field_facets,
        "query_facets": query_facets
    }
Creates a dict of dictionaries suitable for passing to the SearchQuerySet facet, date_facet or query_facet method. All keyword arguments should be wrapped in a list.
513
38
241,872
def parse_field_options(self, *options):
    defaults = {}
    for option in options:
        if isinstance(option, six.text_type):
            tokens = [token.strip() for token in
                      option.split(self.view.lookup_sep)]
            for token in tokens:
                if not len(token.split(":")) == 2:
                    warnings.warn(
                        "The %s token is not properly formatted. Tokens need "
                        "to be formatted as 'token:value' pairs." % token)
                    continue
                param, value = token.split(":", 1)
                if any([k == param for k in
                        ("start_date", "end_date", "gap_amount")]):
                    if param in ("start_date", "end_date"):
                        value = parser.parse(value)
                    if param == "gap_amount":
                        value = int(value)
                defaults[param] = value
    return defaults
Parse the field options query string and return it as a dictionary.
207
14
241,873
def build_query(self, **filters):
    applicable_filters = None
    filters = dict(
        (k, filters[k]) for k in chain(
            self.D.UNITS.keys(),
            [constants.DRF_HAYSTACK_SPATIAL_QUERY_PARAM]) if k in filters)
    distance = dict((k, v) for k, v in filters.items()
                    if k in self.D.UNITS.keys())
    try:
        latitude, longitude = map(float, self.tokenize(
            filters[constants.DRF_HAYSTACK_SPATIAL_QUERY_PARAM],
            self.view.lookup_sep))
        point = self.Point(longitude, latitude, srid=constants.GEO_SRID)
    except ValueError:
        raise ValueError(
            "Cannot convert `from=latitude,longitude` query parameter to "
            "float values. Make sure to provide numerical values only!")
    except KeyError:
        # If the user has not provided any `from` query string parameter,
        # just return.
        pass
    else:
        for unit in distance.keys():
            if not len(distance[unit]) == 1:
                raise ValueError("Each unit must have exactly one value.")
            distance[unit] = float(distance[unit][0])
        if point and distance:
            applicable_filters = {
                "dwithin": {
                    "field": self.backend.point_field,
                    "point": point,
                    "distance": self.D(**distance)
                },
                "distance": {
                    "field": self.backend.point_field,
                    "point": point
                }
            }
    return applicable_filters
Build queries for geo spatial filtering.
369
7
241,874
def merge_dict(a, b):
    if not isinstance(b, dict):
        return b
    result = deepcopy(a)
    for key, val in six.iteritems(b):
        if key in result and isinstance(result[key], dict):
            result[key] = merge_dict(result[key], val)
        elif key in result and isinstance(result[key], list):
            result[key] = sorted(list(set(val) | set(result[key])))
        else:
            result[key] = deepcopy(val)
    return result
Recursively merges and returns dict a with dict b. Any list values will be combined and returned sorted.
128
23
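Usage sketch, assuming merge_dict() as defined above: nested dicts merge recursively, lists are unioned and sorted, and scalar values from b win:

    a = {'opts': {'limit': 10, 'tags': ['x', 'z']}, 'page': 1}
    b = {'opts': {'tags': ['y'], 'order': 'asc'}, 'page': 2}
    assert merge_dict(a, b) == {
        'opts': {'limit': 10, 'tags': ['x', 'y', 'z'], 'order': 'asc'},
        'page': 2,
    }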
241,875
def get_queryset(self, index_models=[]):
    if self.queryset is not None and isinstance(self.queryset,
                                                self.object_class):
        queryset = self.queryset.all()
    else:
        queryset = self.object_class()._clone()
        if len(index_models):
            queryset = queryset.models(*index_models)
        elif len(self.index_models):
            queryset = queryset.models(*self.index_models)
    return queryset
Get the list of items for this view. Returns self.queryset if defined and is a self.object_class instance.
129
27
241,876
def get_object(self):
    queryset = self.get_queryset()
    if "model" in self.request.query_params:
        try:
            app_label, model = map(
                six.text_type.lower,
                self.request.query_params["model"].split(".", 1))
            ctype = ContentType.objects.get(app_label=app_label, model=model)
            queryset = self.get_queryset(index_models=[ctype.model_class()])
        except (ValueError, ContentType.DoesNotExist):
            raise Http404(
                "Could not find any models matching '%s'. Make sure to use "
                "a valid 'app_label.model' name for the 'model' query "
                "parameter." % self.request.query_params["model"])
    lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
    if lookup_url_kwarg not in self.kwargs:
        raise AttributeError(
            "Expected view %s to be called with a URL keyword argument "
            "named '%s'. Fix your URL conf, or set the `.lookup_field` "
            "attribute on the view correctly." %
            (self.__class__.__name__, lookup_url_kwarg))
    queryset = queryset.filter(self.query_object(
        (self.document_uid_field, self.kwargs[lookup_url_kwarg])))
    count = queryset.count()
    if count == 1:
        return queryset[0]
    elif count > 1:
        raise Http404("Multiple results matches the given query. "
                      "Expected a single result.")
    raise Http404("No result matches the given query.")
Fetch a single document from the data store according to whatever unique identifier is available for that document in the SearchIndex.
396
24
241,877
def more_like_this(self, request, pk=None):
    obj = self.get_object().object
    queryset = self.filter_queryset(self.get_queryset()).more_like_this(obj)
    page = self.paginate_queryset(queryset)
    if page is not None:
        serializer = self.get_serializer(page, many=True)
        return self.get_paginated_response(serializer.data)
    serializer = self.get_serializer(queryset, many=True)
    return Response(serializer.data)
Sets up a detail route for more-like-this results. Note that you'll need backend support in order to take advantage of this.
138
29
241,878
def filter_facet_queryset(self, queryset):
    for backend in list(self.facet_filter_backends):
        queryset = backend().filter_queryset(self.request, queryset, self)
    if self.load_all:
        queryset = queryset.load_all()
    return queryset
Given a search queryset, filter it with whichever facet filter backends are in use.
80
17
241,879
def get_facet_serializer(self, *args, **kwargs):
    assert "objects" in kwargs, \
        "`objects` is a required argument to `get_facet_serializer()`"
    facet_serializer_class = self.get_facet_serializer_class()
    kwargs["context"] = self.get_serializer_context()
    kwargs["context"].update({
        "objects": kwargs.pop("objects"),
        "facet_query_params_text": self.facet_query_params_text,
    })
    return facet_serializer_class(*args, **kwargs)
Return the facet serializer instance that should be used for serializing faceted output.
156
17
241,880
def get_facet_serializer_class(self):
    if self.facet_serializer_class is None:
        raise AttributeError(
            "%(cls)s should either include a `facet_serializer_class` "
            "attribute, or override %(cls)s.get_facet_serializer_class() "
            "method." % {"cls": self.__class__.__name__})
    return self.facet_serializer_class
Return the class to use for serializing facets. Defaults to using self.facet_serializer_class.
102
23
241,881
def get_facet_objects_serializer(self, *args, **kwargs):
    facet_objects_serializer_class = self.get_facet_objects_serializer_class()
    kwargs["context"] = self.get_serializer_context()
    return facet_objects_serializer_class(*args, **kwargs)
Return the serializer instance which should be used for serializing faceted objects.
85
16
241,882
def bind(self, field_name, parent):
    # In order to enforce a consistent style, we error if a redundant
    # 'source' argument has been used. For example:
    # my_field = serializer.CharField(source='my_field')
    assert self.source != field_name, (
        "It is redundant to specify `source='%s'` on field '%s' in "
        "serializer '%s', because it is the same as the field name. "
        "Remove the `source` keyword argument." %
        (field_name, self.__class__.__name__, parent.__class__.__name__))

    self.field_name = field_name
    self.parent = parent

    # `self.label` should default to being based on the field name.
    if self.label is None:
        self.label = field_name.replace('_', ' ').capitalize()

    # self.source should default to being the same as the field name.
    if self.source is None:
        self.source = self.convert_field_name(field_name)

    # self.source_attrs is a list of attributes that need to be looked up
    # when serializing the instance, or populating the validated data.
    if self.source == '*':
        self.source_attrs = []
    else:
        self.source_attrs = self.source.split('.')
Initializes the field name and parent for the field instance. Called when a field is added to the parent serializer instance. Taken from DRF and modified to support drf_haystack multiple index functionality.
304
43
241,883
def _get_default_field_kwargs(model, field):
    kwargs = {}
    try:
        field_name = field.model_attr or field.index_fieldname
        model_field = model._meta.get_field(field_name)
        kwargs.update(get_field_kwargs(field_name, model_field))
        # Remove stuff we don't care about!
        delete_attrs = [
            "allow_blank",
            "choices",
            "model_field",
            "allow_unicode",
        ]
        for attr in delete_attrs:
            if attr in kwargs:
                del kwargs[attr]
    except FieldDoesNotExist:
        pass
    return kwargs
Get the required attributes from the model field in order to instantiate a REST Framework serializer field.
156
20
241,884
def _get_index_class_name(self, index_cls):
    cls_name = index_cls.__name__
    aliases = self.Meta.index_aliases
    return aliases.get(cls_name, cls_name.split('.')[-1])
Converts an index model class to a name suitable for use as a field-name prefix. A user may optionally specify custom aliases via an index_aliases attribute on the Meta class.
66
36
241,885
def get_fields(self):
    fields = self.Meta.fields
    exclude = self.Meta.exclude
    ignore_fields = self.Meta.ignore_fields
    indices = self.Meta.index_classes

    declared_fields = copy.deepcopy(self._declared_fields)
    prefix_field_names = len(indices) > 1
    field_mapping = OrderedDict()

    # overlapping fields on multiple indices is supported by internally
    # prefixing the field names with the index class to which they belong
    # or, optionally, a user-provided alias for the index.
    for index_cls in self.Meta.index_classes:
        prefix = ""
        if prefix_field_names:
            prefix = "_%s__" % self._get_index_class_name(index_cls)
        for field_name, field_type in six.iteritems(index_cls.fields):
            orig_name = field_name
            field_name = "%s%s" % (prefix, field_name)

            # Don't use this field if it is in `ignore_fields`
            if orig_name in ignore_fields or field_name in ignore_fields:
                continue

            # When fields to include are decided by `exclude`
            if exclude:
                if orig_name in exclude or field_name in exclude:
                    continue

            # When fields to include are decided by `fields`
            if fields:
                if orig_name not in fields and field_name not in fields:
                    continue

            # Look up the field attributes on the current index model,
            # in order to correctly instantiate the serializer field.
            model = index_cls().get_model()
            kwargs = self._get_default_field_kwargs(model, field_type)
            kwargs['prefix_field_names'] = prefix_field_names
            field_mapping[field_name] = self._field_mapping[field_type](**kwargs)

    # Add any explicitly declared fields. They *will* override any index
    # fields in case of naming collision!.
    if declared_fields:
        for field_name in declared_fields:
            field_mapping[field_name] = declared_fields[field_name]
    return field_mapping
Get the required fields for serializing the result.
472
10
241,886
def to_representation(self, instance):
    if self.Meta.serializers:
        ret = self.multi_serializer_representation(instance)
    else:
        ret = super(HaystackSerializer, self).to_representation(instance)

    prefix_field_names = len(getattr(self.Meta, "index_classes")) > 1
    current_index = self._get_index_class_name(type(instance.searchindex))
    for field in self.fields.keys():
        # handle declared field value methods on serializer
        value_method = getattr(self, "get_{}".format(field), None)
        if value_method and callable(value_method):
            ret[field] = value_method()

        # now convert namespaced field names
        orig_field = field
        if prefix_field_names:
            parts = field.split("__")
            if len(parts) > 1:
                index = parts[0][1:]  # trim the preceding '_'
                field = parts[1]
                if index == current_index:
                    ret[field] = ret[orig_field]
                del ret[orig_field]
        elif field not in chain(instance.searchindex.fields.keys(),
                                self._declared_fields.keys()):
            del ret[orig_field]

    # include the highlighted field in either case
    if getattr(instance, "highlighted", None):
        ret["highlighted"] = instance.highlighted[0]
    return ret
If we have a serializer mapping, use that; otherwise, use standard serializer behavior. Since we might be dealing with multiple indexes, some fields might not be valid for all results, so do not render the fields which don't belong to the search result.
328
48
241,887
def get_narrow_url(self, instance):
    text = instance[0]
    request = self.context["request"]
    query_params = request.GET.copy()

    # Never keep the page query parameter in narrowing urls.
    # It will raise a NotFound exception when trying to paginate
    # a narrowed queryset.
    page_query_param = self.get_paginate_by_param()
    if page_query_param and page_query_param in query_params:
        del query_params[page_query_param]

    selected_facets = set(query_params.pop(
        self.root.facet_query_params_text, []))
    selected_facets.add("%(field)s_exact:%(text)s" %
                        {"field": self.parent_field, "text": text})
    query_params.setlist(self.root.facet_query_params_text,
                         sorted(selected_facets))

    path = "%(path)s?%(query)s" % {"path": request.path_info,
                                   "query": query_params.urlencode()}
    url = request.build_absolute_uri(path)
    return serializers.Hyperlink(url, "narrow-url")
Return a link suitable for narrowing on the current item.
283
11
241,888
def to_representation(self, field, instance):
    self.parent_field = field
    return super(FacetFieldSerializer, self).to_representation(instance)
Set the parent_field property equal to the current field on the serializer class, so that each field can query it to see what kind of attribute they are processing.
39
33
241,889
def get_fields(self):
    field_mapping = OrderedDict()
    for field, data in self.instance.items():
        field_mapping.update({
            field: self.facet_dict_field_class(
                child=self.facet_list_field_class(
                    child=self.facet_field_serializer_class(data)),
                required=False)})
    if self.serialize_objects is True:
        field_mapping["objects"] = serializers.SerializerMethodField()
    return field_mapping
This returns a dictionary containing the top-most fields: dates, fields and queries.
121
14
241,890
def get_objects(self, instance):
    view = self.context["view"]
    queryset = self.context["objects"]
    page = view.paginate_queryset(queryset)
    if page is not None:
        serializer = view.get_facet_objects_serializer(page, many=True)
        return OrderedDict([
            ("count", self.get_count(queryset)),
            ("next", view.paginator.get_next_link()),
            ("previous", view.paginator.get_previous_link()),
            ("results", serializer.data)
        ])
    serializer = view.get_serializer(queryset, many=True)
    return serializer.data
Return a list of objects matching the faceted result.
173
11
241,891
def get_document_field(instance):
    for name, field in instance.searchindex.fields.items():
        if field.document is True:
            return name
Returns which field the search index has marked as its document=True field.
35
16
241,892
def apply_filters(self, queryset, applicable_filters=None,
                  applicable_exclusions=None):
    if applicable_filters:
        queryset = queryset.filter(applicable_filters)
    if applicable_exclusions:
        queryset = queryset.exclude(applicable_exclusions)
    return queryset
Apply constructed filters and excludes and return the queryset.
73
11
241,893
def build_filters(self, view, filters=None):
    query_builder = self.get_query_builder(backend=self, view=view)
    return query_builder.build_query(**(filters if filters else {}))
Get the query builder instance and return constructed query filters.
55
11
241,894
def filter_queryset(self, request, queryset, view):
    applicable_filters, applicable_exclusions = self.build_filters(
        view, filters=self.get_request_filters(request))
    return self.apply_filters(
        queryset=queryset,
        applicable_filters=self.process_filters(
            applicable_filters, queryset, view),
        applicable_exclusions=self.process_filters(
            applicable_exclusions, queryset, view))
Return the filtered queryset.
115
7
241,895
def get_query_builder(self, *args, **kwargs):
    query_builder = self.get_query_builder_class()
    return query_builder(*args, **kwargs)
Return the query builder class instance that should be used to build the query which is passed to the search engine backend.
48
23
241,896
def apply_filters(self, queryset, applicable_filters=None,
                  applicable_exclusions=None):
    for field, options in applicable_filters["field_facets"].items():
        queryset = queryset.facet(field, **options)
    for field, options in applicable_filters["date_facets"].items():
        queryset = queryset.date_facet(field, **options)
    for field, options in applicable_filters["query_facets"].items():
        queryset = queryset.query_facet(field, **options)
    return queryset
Apply faceting to the queryset.
151
8
241,897
def __convert_to_df(a, val_col=None, group_col=None, val_id=None,
                    group_id=None):
    if not group_col:
        group_col = 'groups'
    if not val_col:
        val_col = 'vals'

    if isinstance(a, DataFrame):
        x = a.copy()
        if not {group_col, val_col}.issubset(a.columns):
            raise ValueError('Specify correct column names using '
                             '`group_col` and `val_col` args')
        return x, val_col, group_col
    elif isinstance(a, list) or (isinstance(a, np.ndarray) and
                                 not a.shape.count(2)):
        grps_len = map(len, a)
        grps = list(it.chain(*[[i + 1] * l for i, l in enumerate(grps_len)]))
        vals = list(it.chain(*a))
        return DataFrame({val_col: vals, group_col: grps}), val_col, group_col
    elif isinstance(a, np.ndarray):
        # cols ids not defined
        # trying to infer
        if not all([val_id, group_id]):
            if np.argmax(a.shape):
                a = a.T
            ax = [np.unique(a[:, 0]).size, np.unique(a[:, 1]).size]
            if np.asscalar(np.diff(ax)):
                __val_col = np.argmax(ax)
                __group_col = np.argmin(ax)
            else:
                raise ValueError('Cannot infer input format.\n'
                                 'Please specify `val_id` and `group_id` args')
            cols = {__val_col: val_col, __group_col: group_col}
        else:
            cols = {val_id: val_col, group_id: group_col}
        cols_vals = dict(sorted(cols.items())).values()
        return DataFrame(a, columns=cols_vals), val_col, group_col
Hidden helper method to create a DataFrame with input data for further processing.
500
15
241,898
def posthoc_tukey_hsd(x, g, alpha=0.05):
    result = pairwise_tukeyhsd(x, g, alpha=alpha)
    groups = np.array(result.groupsunique, dtype=np.str)
    groups_len = len(groups)
    vs = np.zeros((groups_len, groups_len), dtype=np.int)

    for a in result.summary()[1:]:
        a0 = str(a[0])
        a1 = str(a[1])
        a0i = np.where(groups == a0)[0][0]
        a1i = np.where(groups == a1)[0][0]
        vs[a0i, a1i] = 1 if str(a[5]) == 'True' else 0

    vs = np.triu(vs)
    np.fill_diagonal(vs, -1)
    tri_lower = np.tril_indices(vs.shape[0], -1)
    vs[tri_lower] = vs.T[tri_lower]
    return DataFrame(vs, index=groups, columns=groups)
Pairwise comparisons with TukeyHSD confidence intervals. This is a convenience function to make statsmodels' pairwise_tukeyhsd method more applicable for further use.
261
35
241,899
def posthoc_mannwhitney(a, val_col=None, group_col=None, use_continuity=True,
                        alternative='two-sided', p_adjust=None, sort=True):
    x, _val_col, _group_col = __convert_to_df(a, val_col, group_col)

    if not sort:
        x[_group_col] = Categorical(x[_group_col],
                                    categories=x[_group_col].unique(),
                                    ordered=True)
    x.sort_values(by=[_group_col, _val_col], ascending=True, inplace=True)

    groups = np.unique(x[_group_col])
    x_len = groups.size
    vs = np.zeros((x_len, x_len))
    tri_upper = np.triu_indices(vs.shape[0], 1)
    tri_lower = np.tril_indices(vs.shape[0], -1)
    vs[:, :] = 0

    combs = it.combinations(range(x_len), 2)
    for i, j in combs:
        vs[i, j] = ss.mannwhitneyu(
            x.loc[x[_group_col] == groups[i], _val_col],
            x.loc[x[_group_col] == groups[j], _val_col],
            use_continuity=use_continuity, alternative=alternative)[1]

    if p_adjust:
        vs[tri_upper] = multipletests(vs[tri_upper], method=p_adjust)[1]
    vs[tri_lower] = vs.T[tri_lower]
    np.fill_diagonal(vs, -1)
    return DataFrame(vs, index=groups, columns=groups)
Pairwise comparisons with Mann-Whitney rank test.
416
11