idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
23,100
def _src_path_stats(self, src_path):
    """Return a dict of statistics for the source file at ``src_path``."""
    lines_with_violations = self.violation_lines(src_path)
    all_violations = sorted(self._diff_violations()[src_path].violations)

    # Snippet rendering is best-effort: an unreadable source file simply
    # produces no snippets.
    snippets = []
    if self.INCLUDE_SNIPPETS:
        try:
            snippets = Snippet.load_snippets_html(src_path,
                                                  lines_with_violations)
        except IOError:
            snippets = []

    return {
        'percent_covered': self.percent_covered(src_path),
        'violation_lines': TemplateReportGenerator.combine_adjacent_lines(
            lines_with_violations),
        'violations': all_violations,
        'snippets_html': snippets,
    }
Return a dict of statistics for the source file at src_path.
23,101
def run_command_for_code(command):
    """Run ``command`` in a subprocess and return its exit code.

    Output on stdout and stderr is consumed and discarded.
    """
    proc = subprocess.Popen(
        command,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    proc.communicate()
    return proc.returncode
Returns the command's exit code.
23,102
def style_defs(cls):
    """Return the CSS style definitions required by the formatted snippet."""
    # Violation lines are highlighted with the class-level color.
    fmt = HtmlFormatter()
    fmt.style.highlight_color = cls.VIOLATION_COLOR
    return fmt.get_style_defs()
Return the CSS style definitions required by the formatted snippet .
23,103
def html(self):
    """Return an HTML representation of the snippet."""
    # Highlight offsets are shifted so they line up with the snippet's
    # position in the original file.
    highlighted = self._shift_lines(self._violation_lines, self._start_line)
    fmt = HtmlFormatter(
        cssclass=self.DIV_CSS_CLASS,
        linenos=True,
        linenostart=self._start_line,
        hl_lines=highlighted,
        lineanchors=self._src_filename,
    )
    return pygments.format(self.src_tokens(), fmt)
Return an HTML representation of the snippet .
23,104
def load_snippets_html(cls, src_path, violation_lines):
    """Load snippets from ``src_path`` and render each one as HTML."""
    return [
        snippet.html()
        for snippet in cls.load_snippets(src_path, violation_lines)
    ]
Load snippets from the file at src_path and format them as HTML .
23,105
def _group_tokens(cls, token_stream, range_list):
    """Group tokens from ``token_stream`` into the given line ranges.

    Returns a dict mapping each inclusive ``(start, end)`` range to the
    list of tokens whose lines fall inside it.
    """
    token_map = {rng: [] for rng in range_list}
    line_num = 1
    for ttype, val in token_stream:
        if '\n' not in val:
            # Single-line token: attach it to every range covering this line.
            for (start, end), grouped in six.iteritems(token_map):
                if start <= line_num <= end:
                    grouped.append((ttype, val))
        else:
            # Multi-line token: keep only the lines inside each range.
            val_lines = val.split('\n')
            for (start, end), grouped in six.iteritems(token_map):
                include_vals = [
                    val_lines[i]
                    for i in range(0, len(val_lines))
                    if start <= i + line_num <= end
                ]
                if len(include_vals) > 0:
                    grouped.append((ttype, '\n'.join(include_vals)))
            line_num += len(val_lines) - 1
    return token_map
Group tokens into snippet ranges .
23,106
def violations(self, src_path):
    """Return a list of Violations recorded in ``src_path``.

    Results are cached in ``self.violations_dict``; the quality tool is
    only run (or its reports parsed) on a cache miss.
    """
    # Files the driver doesn't understand can have no violations.
    if not any(src_path.endswith(ext)
               for ext in self.driver.supported_extensions):
        return []
    if src_path not in self.violations_dict:
        if self.reports:
            # Pre-generated reports were supplied: parse those instead of
            # invoking the tool.
            self.violations_dict = self.driver.parse_reports(self.reports)
        else:
            # Probe for the tool once and memoize the answer.
            if self.driver_tool_installed is None:
                self.driver_tool_installed = self.driver.installed()
            if not self.driver_tool_installed:
                raise EnvironmentError("{} is not installed".format(
                    self.driver.name))
            # Deep-copy so repeated calls don't keep appending arguments to
            # the driver's base command.
            command = copy.deepcopy(self.driver.command)
            if self.options:
                command.append(self.options)
            if os.path.exists(src_path):
                command.append(src_path.encode(sys.getfilesystemencoding()))
            output, _ = execute(command, self.driver.exit_codes)
            self.violations_dict.update(self.driver.parse_reports([output]))
    return self.violations_dict[src_path]
Return a list of Violations recorded in src_path .
23,107
def configure(self, transport, auth, address, port):
    """Record the paramiko transport and connection details for the tunnel."""
    self.transport = transport
    self.port = port
    self.address = address
    self.username = auth.username
Connect paramiko transport
23,108
def start(self, local_port, remote_address, remote_port):
    """Open the ssh forward tunnel."""
    self.local_port = local_port
    self.remote_address = remote_address
    self.remote_port = remote_port
    logger.debug(("Starting ssh tunnel {0}:{1}:{2} for "
                  "{3}@{4}".format(local_port, remote_address, remote_port,
                                   self.username, self.address)))
    self.forward = Forward(local_port, remote_address, remote_port,
                           self.transport)
    self.forward.start()
Start ssh tunnel
23,109
def cleanup(self):
    """Stop the forward tunnel and close the transport, when present."""
    if self.local_port is not None:
        logger.debug(("Stopping ssh tunnel {0}:{1}:{2} for "
                      "{3}@{4}".format(self.local_port, self.remote_address,
                                       self.remote_port, self.username,
                                       self.address)))
    if self.forward is not None:
        self.forward.stop()
        self.forward.join()
    if self.transport is not None:
        self.transport.close()
Cleanup resources used during execution
23,110
def set_stream_logger(name='margaritashotgun', level=logging.INFO,
                      format_string=None):
    """Add a stream handler for ``name`` at ``level`` to the logging module.

    Also pins the noisy paramiko logger to CRITICAL with its own handler.
    """
    if format_string is None:
        format_string = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    time_format = "%Y-%m-%dT%H:%M:%S"
    formatter = logging.Formatter(format_string, time_format)

    log = logging.getLogger(name)
    log.setLevel(level)
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(formatter)
    log.addHandler(handler)

    # Quiet paramiko: only CRITICAL messages get through.
    paramiko_log = logging.getLogger('paramiko')
    paramiko_log.setLevel(logging.CRITICAL)
    paramiko_handler = logging.StreamHandler()
    paramiko_handler.setLevel(logging.CRITICAL)
    paramiko_handler.setFormatter(formatter)
    paramiko_log.addHandler(paramiko_handler)
Add a stream handler for the provided name and level to the logging module .
23,111
def connect(self, username, password, key, address, port, jump_host):
    """Connect the ssh tunnel and shell executor to a remote host."""
    self.remote_port = 22 if port is None else int(port)
    auth = Auth(username=username, password=password, key=key)

    jump_auth = None
    if jump_host is not None:
        jump_auth = Auth(username=jump_host['username'],
                         password=jump_host['password'],
                         key=jump_host['key'])
        # Default the jump host ssh port when unspecified.
        if jump_host['port'] is None:
            jump_host['port'] = 22

    self.shell.connect(auth, address, self.remote_port, jump_host, jump_auth)
    transport = self.shell.transport()
    self.tunnel.configure(transport, auth, address, self.remote_port)
    self.remote_addr = address
Connect ssh tunnel and shell executor to remote host
23,112
def start_tunnel(self, local_port, remote_address, remote_port):
    """Start the ssh forward tunnel and remember its local port."""
    self.tunnel.start(local_port, remote_address, remote_port)
    self.tunnel_port = local_port
Start ssh forward tunnel
23,113
def mem_size(self):
    """Return the memory size in bytes of the remote host."""
    output = self.shell.execute(self.commands.mem_size.value)
    size_text = self.shell.decode(output['stdout'])
    # stderr is decoded (which logs it) but otherwise discarded.
    self.shell.decode(output['stderr'])
    return int(size_text)
Returns the memory size in bytes of the remote host
23,114
def kernel_version(self):
    """Return the kernel version string of the remote host."""
    output = self.shell.execute(self.commands.kernel_version.value)
    version = self.shell.decode(output['stdout'])
    # stderr is decoded (which logs it) but otherwise discarded.
    self.shell.decode(output['stderr'])
    return version
Returns the kernel version of the remote host.
23,115
def wait_for_lime(self, listen_port, listen_address="0.0.0.0",
                  max_tries=20, wait=1):
    """Poll until LiME is listening, or ``max_tries`` attempts are used.

    Returns True when the LiME listener was observed, False otherwise.
    """
    pattern = self.commands.lime_pattern.value.format(listen_address,
                                                      listen_port)
    loaded = False
    attempts = 0
    while attempts < max_tries and not loaded:
        loaded = self.check_for_lime(pattern)
        attempts += 1
        # The original sleeps after every check, including a successful one.
        time.sleep(wait)
    return loaded
Wait for lime to load unless max_tries is exceeded.
23,116
def check_for_lime(self, pattern):
    """Return True when a LiME listener matching ``pattern`` is present."""
    result = self.shell.execute(self.commands.lime_check.value)
    stdout = self.shell.decode(result['stdout'])
    for local_addr, remote_addr in self.net_parser.parse(stdout):
        if local_addr == pattern:
            return True
    return False
Check to see if LiME has loaded on the remote system
23,117
def upload_module(self, local_path=None, remote_path="/tmp/lime.ko"):
    """Upload the LiME kernel module to the remote host.

    Raises when no local module path was supplied.
    """
    if local_path is None:
        # NOTE(review): exception name looks like a typo of a project
        # exception ("FoundFound") -- confirm it is actually defined.
        raise FileNotFoundFoundError(local_path)
    self.shell.upload_file(local_path, remote_path)
Upload LiME kernel module to remote host
23,118
def load_lime(self, remote_path, listen_port, dump_format='lime'):
    """Insert the LiME kernel module on the remote host (non-blocking)."""
    command = self.commands.load_lime.value.format(remote_path,
                                                   listen_port,
                                                   dump_format)
    self.shell.execute_async(command)
Load LiME kernel module from remote filesystem
23,119
def cleanup(self):
    """Release tunnel and shell resources, unloading LiME when possible."""
    try:
        self.unload_lime()
    except AttributeError:
        # LiME may never have been loaded; nothing to unload.
        pass
    self.tunnel.cleanup()
    self.shell.cleanup()
Release resources used by supporting classes
23,120
def connect(self, auth, address, port, jump_host, jump_auth):
    """Create an ssh session to a remote host, optionally via a jump host.

    Raises SSHConnectionError for any underlying ssh/socket failure.
    """
    try:
        self.target_address = address
        sock = None
        if jump_host is not None:
            # Connect to the jump host first, then open a direct-tcpip
            # channel through it to reach the real target.
            self.jump_host_ssh = paramiko.SSHClient()
            self.jump_host_ssh.set_missing_host_key_policy(
                paramiko.AutoAddPolicy())
            self.connect_with_auth(self.jump_host_ssh, jump_auth,
                                   jump_host['addr'], jump_host['port'],
                                   sock)
            transport = self.jump_host_ssh.get_transport()
            dest_addr = (address, port)
            jump_addr = (jump_host['addr'], jump_host['port'])
            channel = transport.open_channel('direct-tcpip', dest_addr,
                                             jump_addr)
            # The channel acts as the socket for the target connection.
            self.connect_with_auth(self.ssh, auth, address, port, channel)
        else:
            self.connect_with_auth(self.ssh, auth, address, port, sock)
    except (AuthenticationException, SSHException,
            ChannelException, SocketError) as ex:
        raise SSHConnectionError("{0}:{1}".format(address, port), ex)
Creates an ssh session to a remote host
23,121
def connect_with_password(self, ssh, username, password, address, port,
                          sock, timeout=20):
    """Open an ssh session authenticated with a username and password."""
    ssh.connect(hostname=address,
                port=port,
                username=username,
                password=password,
                sock=sock,
                timeout=timeout)
Create an ssh session to a remote host with a username and password
23,122
def connect_with_key(self, ssh, username, key, address, port, sock,
                     timeout=20):
    """Open an ssh session authenticated with a username and rsa key."""
    ssh.connect(username=username,
                pkey=key,
                hostname=address,
                port=port,
                sock=sock,
                timeout=timeout)
Create an ssh session to a remote host with a username and rsa key
23,123
def execute(self, command):
    """Execute ``command`` on the remote host.

    Returns a dict with ``stdin``/``stdout``/``stderr`` paramiko streams.
    Raises SSHConnectionError when the transport is closed, or
    SSHCommandError when execution fails.
    """
    try:
        if self.ssh.get_transport() is not None:
            logger.debug('{0}: executing "{1}"'.format(self.target_address,
                                                       command))
            stdin, stdout, stderr = self.ssh.exec_command(command)
            return dict(zip(['stdin', 'stdout', 'stderr'],
                            [stdin, stdout, stderr]))
        else:
            raise SSHConnectionError(self.target_address,
                                     "ssh transport is closed")
    except (AuthenticationException, SSHException,
            ChannelException, SocketError) as ex:
        # Fixed: space after the colon so the two concatenated literals
        # don't render as "...exception:<ex>".
        logger.critical(("{0} execution failed on {1} with exception: "
                         "{2}".format(command, self.target_address, ex)))
        raise SSHCommandError(self.target_address, command, ex)
Executes command on remote hosts
23,124
def execute_async(self, command, callback=None):
    """Execute ``command`` on the remote host without blocking.

    Returns the Future; ``callback`` (if given) is attached as a
    done-callback. Raises SSHCommandError on ssh/socket failures.
    """
    try:
        # Fixed: space between the quoted command and "with callback" so the
        # concatenated literals don't run together in the log line.
        logger.debug(('{0}: execute async "{1}" '
                      'with callback {2}'.format(self.target_address,
                                                 command, callback)))
        future = self.executor.submit(self.execute, command)
        if callback is not None:
            future.add_done_callback(callback)
        return future
    except (AuthenticationException, SSHException,
            ChannelException, SocketError) as ex:
        # Fixed: space after the colon (same defect as in execute()).
        logger.critical(("{0} execution failed on {1} with exception: "
                         "{2}".format(command, self.target_address, ex)))
        raise SSHCommandError(self.target_address, command, ex)
Executes command on remote hosts without blocking
23,125
def decode(self, stream, encoding='utf-8'):
    """Read a paramiko stream and return its contents as a string.

    Surrounding newlines are stripped; non-empty results are logged.
    """
    text = stream.read().decode(encoding).strip("\n")
    if text != "":
        logger.debug(('{0}: decoded "{1}" with encoding '
                      '{2}'.format(self.target_address, text, encoding)))
    return text
Convert paramiko stream into a string
23,126
def upload_file(self, local_path, remote_path):
    """Copy ``local_path`` to ``remote_path`` on the remote host via SFTP.

    Failures are logged as warnings rather than raised (best-effort).
    """
    logger.debug("{0}: uploading {1} to {0}:{2}".format(self.target_address,
                                                        local_path,
                                                        remote_path))
    try:
        sftp = paramiko.SFTPClient.from_transport(self.transport())
        sftp.put(local_path, remote_path)
        sftp.close()
    except SSHException as ex:
        # Fixed: space after the colon so the two concatenated literals
        # don't render as "...exception:<ex>".
        logger.warn(("{0}: LiME module upload failed with exception: "
                     "{1}".format(self.target_address, ex)))
Upload a file from the local filesystem to the remote host
23,127
def cleanup(self):
    """Cancel outstanding futures and shut down the executor and ssh client."""
    for future in self.futures:
        future.cancel()
    # Fixed: `wait` is a boolean flag, not a timeout -- the original passed
    # the int 10 (truthy, so it happened to behave like True).
    self.executor.shutdown(wait=True)
    # Fixed: identity-style `!= None` replaced with `is not None`.
    if self.ssh.get_transport() is not None:
        self.ssh.close()
Release resources used during shell execution
23,128
def parse_args(self, args):
    """Parse command line arguments and return an arguments object."""
    parser = argparse.ArgumentParser(
        description='Remote memory aquisition wrapper for LiME')
    # Exactly one of --config / --server / --version must be supplied.
    root = parser.add_mutually_exclusive_group(required=True)
    root.add_argument('-c', '--config', help='path to config.yml')
    root.add_argument('--server',
                      help='hostname or ip of target server')
    root.add_argument('--version', action='version',
                      version="%(prog)s {ver}".format(ver=__version__))
    opts = parser.add_argument_group()
    opts.add_argument('--port', help='ssh port on remote server')
    opts.add_argument('--username',
                      help='username for ssh connection to target server')
    opts.add_argument('--module',
                      help='path to kernel lime kernel module')
    opts.add_argument('--password',
                      help='password for user or encrypted keyfile')
    opts.add_argument('--key',
                      help='path to rsa key for ssh connection')
    opts.add_argument('--jump-server',
                      help='hostname or ip of jump server')
    opts.add_argument('--jump-port', help='ssh port on jump server')
    opts.add_argument('--jump-username',
                      help='username for ssh connection to jump server')
    opts.add_argument('--jump-password',
                      help='password for jump-user or encrypted keyfile')
    opts.add_argument('--jump-key',
                      help='path to rsa key for ssh connection to jump server')
    opts.add_argument('--filename', help='memory dump filename')
    opts.add_argument('--repository', action='store_true',
                      help='enable automatic kernel module downloads')
    opts.add_argument('--repository-url',
                      help='kernel module repository url')
    opts.add_argument('--repository-manifest',
                      help='specify alternate repository manifest')
    # --gpg-no-verify stores False into gpg_verify (default True below).
    opts.add_argument('--gpg-no-verify', dest='gpg_verify',
                      action='store_false',
                      help='skip lime module gpg signature check')
    opts.add_argument('--workers', default=1,
                      help=('number of workers to run in parallel,'
                            'default: auto acceptable values are'
                            '(INTEGER | "auto")'))
    opts.add_argument('--verbose', action='store_true',
                      help='log debug messages')
    opts.set_defaults(repository_manifest='primary')
    opts.set_defaults(gpg_verify=True)
    # Output destinations are mutually exclusive but optional.
    output = parser.add_mutually_exclusive_group(required=False)
    output.add_argument('--bucket', help='memory dump output bucket')
    output.add_argument('--output-dir', help='memory dump output directory')
    log = parser.add_argument_group()
    log.add_argument('--log-dir', help='log directory')
    log.add_argument('--log-prefix', help='log file prefix')
    return parser.parse_args(args)
Parse arguments and return an arguments object
23,129
def configure(self, arguments=None, config=None):
    """Merge command line arguments, config files, and default configs."""
    if arguments is not None:
        args_config = self.configure_args(arguments)
        base_config = copy.deepcopy(default_config)
        working_config = self.merge_config(base_config, args_config)
    if config is not None:
        self.validate_config(config)
        base_config = copy.deepcopy(default_config)
        working_config = self.merge_config(base_config, config)

    # Environment variables may override repository settings.
    repo = self.get_env_default('LIME_REPOSITORY', 'disabled')
    repo_url = self.get_env_default('LIME_REPOSITORY_URL',
                                    working_config['repository']['url'])
    # NOTE(review): the url override is applied only when the repository is
    # enabled via the environment -- confirm against the original layout.
    if repo.lower() == 'enabled':
        working_config['repository']['enabled'] = True
        working_config['repository']['url'] = repo_url
    return working_config
Merge command line arguments config files and default configs
23,130
def get_env_default(self, variable, default):
    """Return the value of environment ``variable``, or ``default`` if unset."""
    # os.environ.get replaces the manual membership check.
    return os.environ.get(variable, default)
Fetch environment variables returning a default if not found
23,131
def configure_args(self, arguments):
    """Create a configuration hash from command line arguments.

    Raises on invalid paths, invalid yaml, or invalid configuration.
    """
    # Validate any user supplied paths before using them.
    module, key, config_path = self.check_file_paths(arguments.module,
                                                     arguments.key,
                                                     arguments.config)
    log_dir = self.check_directory_paths(arguments.log_dir)
    if arguments.repository_url is None:
        url = default_config['repository']['url']
    else:
        url = arguments.repository_url
    args_config = dict(aws=dict(bucket=arguments.bucket),
                       logging=dict(dir=arguments.log_dir,
                                    prefix=arguments.log_prefix),
                       workers=arguments.workers,
                       repository=dict(enabled=arguments.repository,
                                       url=url,
                                       manifest=arguments.repository_manifest,
                                       gpg_verify=arguments.gpg_verify))
    if arguments.server is not None:
        # Build a single host entry from the CLI flags.
        jump_host = None
        if arguments.jump_server is not None:
            if arguments.jump_port is not None:
                jump_port = int(arguments.jump_port)
            else:
                jump_port = None
            jump_host = dict(zip(jump_host_allowed_keys,
                                 [arguments.jump_server, jump_port,
                                  arguments.jump_username,
                                  arguments.jump_password,
                                  arguments.jump_key]))
        if arguments.port is not None:
            port = int(arguments.port)
        else:
            port = None
        host = dict(zip(host_allowed_keys,
                        [arguments.server, port, arguments.username,
                         arguments.password, module, key,
                         arguments.filename, jump_host]))
        args_config['hosts'] = []
        args_config['hosts'].append(host)
    if config_path is not None:
        # A config file supplements (and overrides) CLI-derived settings.
        try:
            config = self.load_config(config_path)
            self.validate_config(config)
            args_config.update(config)
        except YAMLError as ex:
            logger.warn('Invalid yaml Format: {0}'.format(ex))
            raise
        except InvalidConfigurationError as ex:
            logger.warn(ex)
            raise
    return args_config
Create configuration hash from command line arguments.
23,132
def check_file_paths(self, *args):
    """Ensure every non-None argument is a path to an existing file.

    Returns ``args`` unchanged; re-raises OSError for a missing file.
    """
    # Fixed: iterate the values directly instead of the
    # enumerate-then-index anti-idiom (`for path in enumerate(args)`).
    for path in args:
        if path is not None:
            try:
                self.check_file_path(path)
            except OSError as ex:
                logger.warn(ex)
                raise
    return args
Ensure all arguments provided correspond to a file
23,133
def check_file_path(self, path):
    """Raise OSError unless a file exists at ``path``."""
    if not os.path.exists(path):
        raise OSError("File Not Found {}".format(path))
Ensure file exists at the provided path
23,134
def check_directory_paths(self, *args):
    """Ensure every non-None argument is a path to an existing directory.

    Returns ``args`` unchanged; re-raises OSError for a missing directory.
    """
    # Fixed: iterate the values directly instead of the
    # enumerate-then-index anti-idiom (`for path in enumerate(args)`).
    for path in args:
        if path is not None:
            try:
                self.check_directory_path(path)
            except OSError as ex:
                logger.warn(ex)
                raise
    return args
Ensure all arguments correspond to directories
23,135
def check_directory_path(self, path):
    """Raise OSError unless a directory exists at ``path``."""
    if not os.path.isdir(path):
        raise OSError("Directory Does Not Exist {}".format(path))
Ensure directory exists at the provided path
23,136
def validate_config(self, config):
    """Validate that configuration dict keys are supported.

    Raises InvalidConfigurationError for unknown keys, a malformed
    ``hosts`` section, or an incompatible bucket/filename combination.
    """
    try:
        hosts = config['hosts']
    except KeyError:
        raise InvalidConfigurationError('hosts', "",
                                        reason=('hosts configuration '
                                                'section is required'))
    for key in config.keys():
        if key not in default_allowed_keys:
            raise InvalidConfigurationError(key, config[key])
    bucket = False
    try:
        for key in config['aws'].keys():
            if key == 'bucket' and config['aws'][key] is not None:
                bucket = True
            if key not in aws_allowed_keys:
                raise InvalidConfigurationError(key, config['aws'][key])
    except KeyError:
        # The aws section is optional.
        pass
    try:
        for key in config['logging'].keys():
            if key not in logging_allowed_keys:
                raise InvalidConfigurationError(key, config['logging'][key])
    except KeyError:
        # The logging section is optional.
        pass
    try:
        for key in config['repository'].keys():
            if key not in repository_allowed_keys:
                raise InvalidConfigurationError(key,
                                                config['repository'][key])
    except KeyError:
        # The repository section is optional.
        pass
    if type(config['hosts']) is not list:
        raise InvalidConfigurationError('hosts', config['hosts'],
                                        reason="hosts must be a list")
    filename = False
    for host in config['hosts']:
        for key in host.keys():
            if key == 'filename' and host['filename'] is not None:
                filename = True
            if key == 'jump_host' and host['jump_host'] is not None:
                for jump_key in host['jump_host'].keys():
                    if jump_key not in jump_host_allowed_keys:
                        raise InvalidConfigurationError(key,
                                                        host['jump_host'])
            if key not in host_allowed_keys:
                raise InvalidConfigurationError(key, host[key])
    if bucket and filename:
        # Fixed: spaces between the concatenated literals; the message
        # previously rendered as
        # "...isincompatible with filenameconfiguration in hosts".
        raise InvalidConfigurationError('bucket', config['aws']['bucket'],
                                        reason=('bucket configuration is '
                                                'incompatible with filename '
                                                'configuration in hosts'))
Validate configuration dict keys are supported
23,137
def load_key(self, key_path, password):
    """Load an rsa private key, retrying with ``password`` if encrypted."""
    try:
        return paramiko.RSAKey.from_private_key_file(key_path)
    except PasswordRequiredException:
        # Key is encrypted: retry using the supplied password.
        return paramiko.RSAKey.from_private_key_file(key_path,
                                                     password=password)
Creates paramiko rsa key
23,138
def run(self):
    """Capture memory from the configured remote hosts.

    Returns a result summary dict when used as a library; otherwise logs
    the summary and exits the process via quit().
    """
    logger = logging.getLogger(__name__)
    try:
        repo_conf = self.config['repository']
        repo = None
        if repo_conf['enabled'] and repo_conf['gpg_verify']:
            try:
                repo = Repository(repo_conf['url'],
                                  repo_conf['gpg_verify'])
                repo.init_gpg()
            except Exception as ex:
                # CLI mode may install an untrusted signing key
                # interactively; any other failure is fatal.
                if repo is not None and self.library is False:
                    if isinstance(ex, RepositoryUntrustedSigningKeyError):
                        installed = repo.prompt_for_install()
                        if installed is False:
                            logger.critical(("repository signature not "
                                             "installed, install the "
                                             "signature manually or use "
                                             "the --gpg-no-verify flag "
                                             "to bypass this check"))
                            quit(1)
                else:
                    logger.critical(ex)
                    quit(1)
        conf = self.map_config()
        workers = Workers(conf, self.config['workers'], name=self.name,
                          library=self.library)
        description = 'memory capture action'
        results = workers.spawn(description)
        self.statistics(results)
        if self.library is True:
            return dict([('total', self.total),
                         ('completed', self.completed_addresses),
                         ('failed', self.failed_addresses)])
        else:
            logger.info(("{0} hosts processed. completed: {1} "
                         "failed {2}".format(self.total, self.completed,
                                             self.failed)))
            logger.info("completed_hosts: {0}".format(
                self.completed_addresses))
            logger.info("failed_hosts: {0}".format(self.failed_addresses))
            quit()
    except KeyboardInterrupt:
        # Terminate workers on Ctrl-C; re-raise in library mode.
        workers.cleanup(terminate=True)
        if self.library:
            raise
        else:
            quit(1)
Captures remote hosts memory
23,139
def capture(self, tunnel_addr, tunnel_port, filename=None, bucket=None,
            destination=None):
    """Capture memory to the requested OutputDestination.

    Raises when the attributes required by the destination are missing.
    """
    if filename is None:
        raise MemoryCaptureAttributeMissingError('filename')
    if destination == OutputDestinations.local:
        logger.info("{0}: dumping to file://{1}".format(self.remote_addr,
                                                        filename))
        return self.to_file(filename, tunnel_addr, tunnel_port)
    if destination == OutputDestinations.s3:
        if bucket is None:
            raise MemoryCaptureAttributeMissingError('bucket')
        logger.info(("{0}: dumping memory to s3://{1}/"
                     "{2}".format(self.remote_addr, bucket, filename)))
        return self.to_s3(bucket, filename, tunnel_addr, tunnel_port)
    raise MemoryCaptureOutputMissingError(self.remote_addr)
Captures memory based on the provided OutputDestination
23,140
def to_file(self, filename, tunnel_addr, tunnel_port):
    """Stream the memory dump from the tunnel socket into a local file.

    Returns True once the transfer completes.
    """
    if self.progressbar:
        self.bar = ProgressBar(widgets=self.widgets,
                               maxval=self.max_size).start()
        self.bar.start()
    with open(filename, 'wb') as self.outfile:
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((tunnel_addr, tunnel_port))
        self.sock.settimeout(self.sock_timeout)
        bytes_since_update = 0
        while True:
            try:
                data = self.sock.recv(self.recv_size)
                data_length = len(data)
                if not data:
                    # Remote side closed the connection: transfer done.
                    break
                self.outfile.write(data)
                self.transfered = self.transfered + data_length
                bytes_since_update += data_length
                data = None
                data_length = 0
                # Throttle progress updates to every update_threshold bytes.
                if bytes_since_update > self.update_threshold:
                    self.update_progress()
                    bytes_since_update = 0
            except (socket.timeout, socket.error) as ex:
                if isinstance(ex, socket.timeout):
                    # A timeout is treated as the end of the stream.
                    break
                elif isinstance(ex, socket.error):
                    if ex.errno == errno.EINTR:
                        # Interrupted system call: retry the recv.
                        pass
                    else:
                        self.cleanup()
                        raise
                else:
                    self.cleanup()
                    raise
    self.cleanup()
    logger.info('{0}: capture complete: {1}'.format(self.remote_addr,
                                                    filename))
    return True
Writes memory dump to a local file
23,141
def update_progress(self, complete=False):
    """Update the progress bar, or log capture progress at 10% steps."""
    if self.progressbar:
        try:
            self.bar.update(self.transfered)
        except Exception as e:
            # transfered can exceed maxval (memsize estimate); log and
            # carry on rather than crash the capture.
            logger.debug("{0}: {1}, {2} exceeds memsize {3}".format(
                self.remote_addr, e, self.transfered, self.max_size))
        if complete:
            self.bar.update(self.max_size)
            self.bar.finish()
    else:
        percent = int(100 * float(self.transfered) / float(self.max_size))
        # Only log at each 10% step, and only once per step.
        if percent % 10 == 0:
            if self.progress != percent:
                logger.info("{0}: capture {1}% complete".format(
                    self.remote_addr, percent))
                self.progress = percent
Logs capture progress
23,142
def cleanup(self):
    """Release the socket, output file, and progress display of a capture."""
    for resource in (self.sock, self.outfile):
        if resource is not None:
            resource.close()
    if self.bar is not None:
        self.update_progress(complete=True)
Release resources used during memory capture
23,143
def init_gpg(self):
    """Initialize gpg and verify the repository signing key is trusted."""
    if not self.gpg_verify:
        # Verification disabled: nothing to set up.
        return
    logger.debug("gpg verification enabled, initializing gpg")
    gpg_home = os.path.expanduser('~/.gnupg')
    self.gpg = gnupg.GPG(gnupghome=gpg_home)
    self.key_path, self.key_info = self.get_signing_key()
    logger.debug("{0} {1}".format(self.key_path, self.key_info))
    self.check_signing_key()
Initialize gpg object and check if repository signing key is trusted
23,144
def get_signing_key(self):
    """Download the repo signing key and its metadata to /tmp.

    Returns ``(key_path, key_info)``. Raises
    RepositoryMissingSigningKeyError or RepositoryMissingKeyMetadataError
    when the repository does not serve them.
    """
    tmp_key_path = "/tmp/{0}".format(self.repo_signing_key)
    repo_key_path = "{0}/{1}".format(self.url, self.repo_signing_key)
    repo_metadata_path = "{0}/{1}".format(self.url, self.key_metadata)
    req_key = requests.get(repo_key_path)
    req_metadata = requests.get(repo_metadata_path)
    # Fixed: compare status codes with == rather than `is` (int identity
    # is a CPython implementation detail).
    if req_key.status_code == 200:
        logger.debug(("found repository signing key at "
                      "{0}".format(repo_key_path)))
        self.raw_key = req_key.content
        with open(tmp_key_path, 'wb') as f:
            f.write(self.raw_key)
    else:
        raise RepositoryMissingSigningKeyError(repo_key_path)
    if req_metadata.status_code == 200:
        logger.debug(("found key metadata at "
                      "{0}".format(repo_metadata_path)))
        key_info = json.loads(req_metadata.content.decode('utf-8'))
    else:
        # Fixed: the exception was previously named but never raised,
        # which then crashed on the unbound key_info; also removed a
        # stray debug print of the metadata body.
        raise RepositoryMissingKeyMetadataError(repo_metadata_path)
    return (tmp_key_path, key_info)
Download a local copy of repo signing key for installation
23,145
def check_signing_key(self):
    """Check that the repo signing key is trusted by the gpg keychain.

    Raises RepositoryUntrustedSigningKeyError when it is not.
    """
    trusted = False
    for key in self.gpg.list_keys():
        if key['fingerprint'] == self.key_info['fingerprint']:
            logger.debug(("repo signing key trusted in user keyring, "
                          "fingerprint {0}".format(key['fingerprint'])))
            trusted = True
            # Fixed: stop on the first match. The original kept scanning
            # and reset `trusted` to False on the next non-matching key,
            # so only the LAST key in the ring effectively counted.
            break
    if trusted is False:
        repo_key_url = "{0}/{1}".format(self.url, self.repo_signing_key)
        raise RepositoryUntrustedSigningKeyError(
            repo_key_url, self.key_info['fingerprint'])
Check that repo signing key is trusted by gpg keychain
23,146
def prompt_for_install(self):
    """Ask the user whether to install the untrusted repo signing key.

    Returns True when the key was installed, False otherwise.
    """
    print(self.key_info)
    repo_key_url = "{0}/{1}".format(self.url, self.repo_signing_key)
    print(("warning: Repository key untrusted \n"
           "Importing GPG key 0x{0}:\n"
           " Userid: \"{1}\"\n"
           " From : {2}".format(self.key_info['fingerprint'],
                                self.key_info['uids'][0],
                                repo_key_url)))
    response = prompt(u'Is this ok: [y/N] ')
    if response != 'y':
        return False
    self.install_key(self.raw_key)
    return True
Prompt user to install untrusted repo signing key
23,147
def install_key(self, key_data):
    """Import the repo signing key into the local gpg keychain."""
    logger.info(("importing repository signing key {0} "
                 "{1}".format(self.key_info['fingerprint'],
                              self.key_info['uids'][0])))
    outcome = self.gpg.import_keys(key_data)
    logger.debug("import results: {0}".format(outcome.results))
Install untrusted repo signing key
23,148
def fetch(self, kernel_version, manifest_type):
    """Fetch the kernel module matching ``kernel_version`` from the repo.

    Returns the local path of the downloaded module; raises
    KernelModuleNotFoundError when no module matches.
    """
    metadata = self.get_metadata()
    logger.debug("parsed metadata: {0}".format(metadata))
    manifest = self.get_manifest(metadata['manifests'][manifest_type])
    try:
        module = manifest[kernel_version]
    except KeyError:
        raise KernelModuleNotFoundError(kernel_version, self.url)
    logger.debug("found module {0}".format(module))
    return self.fetch_module(module)
Search repository for kernel module matching kernel_version
23,149
def get_metadata(self):
    """Fetch and parse the repository's repomd.xml file.

    Raises RepositoryError for a non-200 response.
    """
    # Fixed: use one normalized base url; previously only the signature
    # path stripped the trailing slash, producing inconsistent urls.
    base_url = self.url.rstrip('/')
    metadata_path = "{}/{}/{}".format(base_url, self.metadata_dir,
                                      self.metadata_file)
    metadata_sig_path = "{}/{}/{}.sig".format(base_url, self.metadata_dir,
                                              self.metadata_file)
    req = requests.get(metadata_path)
    # Fixed: == rather than `is` for the status-code comparison.
    if req.status_code == 200:
        raw_metadata = req.content
    else:
        raise RepositoryError(metadata_path, ("status code not 200: "
                                              "{}".format(req.status_code)))
    if self.gpg_verify:
        self.verify_data_signature(metadata_sig_path, metadata_path,
                                   raw_metadata)
    return self.parse_metadata(raw_metadata)
Fetch repository repomd . xml file
23,150
def parse_metadata(self, metadata_xml):
    """Parse repomd.xml content into a metadata dict.

    Raises RepositoryError when the document cannot be parsed.
    """
    try:
        metadata = dict()
        mdata = xmltodict.parse(metadata_xml)['metadata']
        metadata['revision'] = mdata['revision']
        metadata['manifests'] = dict()
        # A single <data> element parses as a dict, not a list.
        if type(mdata['data']) is list:
            manifests = mdata['data']
        else:
            manifests = [mdata['data']]
        for manifest in manifests:
            manifest_dict = dict()
            manifest_dict['type'] = manifest['@type']
            manifest_dict['checksum'] = manifest['checksum']
            manifest_dict['open_checksum'] = manifest['open_checksum']
            manifest_dict['location'] = manifest['location']['@href']
            manifest_dict['timestamp'] = datetime.fromtimestamp(
                int(manifest['timestamp']))
            manifest_dict['size'] = int(manifest['size'])
            manifest_dict['open_size'] = int(manifest['open_size'])
            metadata['manifests'][manifest['@type']] = manifest_dict
    except Exception as e:
        # Fixed: the format string had two placeholders for three
        # arguments, silently dropping metadata_file from the error path.
        raise RepositoryError("{0}/{1}/{2}".format(self.url,
                                                   self.metadata_dir,
                                                   self.metadata_file), e)
    return metadata
Parse repomd . xml file
23,151
def get_manifest(self, metadata):
    """Download, verify, and parse the manifest described by ``metadata``.

    Raises RepositoryError for a non-200 response.
    """
    manifest_path = "{0}/{1}".format(self.url, metadata['location'])
    req = requests.get(manifest_path, stream=True)
    # Fixed: a non-200 response previously fell through and returned
    # None (and used `is 200` identity comparison).
    if req.status_code != 200:
        raise RepositoryError(manifest_path, ("status code not 200: "
                                              "{}".format(req.status_code)))
    gz_manifest = req.raw.read()
    self.verify_checksum(gz_manifest, metadata['checksum'],
                         metadata['location'])
    manifest = self.unzip_manifest(gz_manifest)
    # NOTE(review): rstrip('.gz') strips any trailing '.', 'g', 'z'
    # characters, not the suffix -- preserved for message compatibility.
    self.verify_checksum(manifest, metadata['open_checksum'],
                         metadata['location'].rstrip('.gz'))
    return self.parse_manifest(manifest)
Get latest manifest as specified in repomd . xml
23,152
def unzip_manifest(self, raw_manifest):
    """Decompress a gzip-encoded manifest and return the raw bytes."""
    archive = gzip.GzipFile(fileobj=BytesIO(raw_manifest))
    return archive.read()
Decompress gzip encoded manifest
23,153
def parse_manifest ( self , manifest_xml ) : manifest = dict ( ) try : mdata = xmltodict . parse ( manifest_xml ) [ 'modules' ] [ 'module' ] for module in mdata : mod = dict ( ) mod [ 'type' ] = module [ '@type' ] mod [ 'name' ] = module [ 'name' ] mod [ 'arch' ] = module [ 'arch' ] mod [ 'checksum' ] = module [ 'checksum' ] mod [ 'version' ] = module [ 'version' ] mod [ 'packager' ] = module [ 'packager' ] mod [ 'location' ] = module [ 'location' ] [ '@href' ] mod [ 'signature' ] = module [ 'signature' ] [ '@href' ] mod [ 'platform' ] = module [ 'platform' ] manifest [ mod [ 'version' ] ] = mod except Exception as e : raise return manifest
Parse manifest xml file
23,154
def fetch_module ( self , module ) : tm = int ( time . time ( ) ) datestamp = datetime . utcfromtimestamp ( tm ) . isoformat ( ) filename = "lime-{0}-{1}.ko" . format ( datestamp , module [ 'version' ] ) url = "{0}/{1}" . format ( self . url , module [ 'location' ] ) logger . info ( "downloading {0} as {1}" . format ( url , filename ) ) req = requests . get ( url , stream = True ) with open ( filename , 'wb' ) as f : f . write ( req . raw . read ( ) ) self . verify_module ( filename , module , self . gpg_verify ) return filename
Download and verify kernel module
23,155
def verify_module ( self , filename , module , verify_signature ) : with open ( filename , 'rb' ) as f : module_data = f . read ( ) self . verify_checksum ( module_data , module [ 'checksum' ] , module [ 'location' ] ) if self . gpg_verify : signature_url = "{0}/{1}" . format ( self . url , module [ 'signature' ] ) file_url = "{0}/{1}" . format ( self . url , module [ 'location' ] ) self . verify_file_signature ( signature_url , file_url , filename )
Verify kernel module checksum and signature
23,156
def verify_checksum ( self , data , checksum , filename ) : calculated_checksum = hashlib . sha256 ( data ) . hexdigest ( ) logger . debug ( "calculated checksum {0} for {1}" . format ( calculated_checksum , filename ) ) if calculated_checksum != checksum : raise RepositoryError ( "{0}/{1}" . format ( self . url , filename ) , ( "checksum verification failed, expected " "{0} got {1}" . format ( checksum , calculated_checksum ) ) )
Verify sha256 checksum vs calculated checksum
23,157
def verify_data_signature ( self , signature_url , data_url , data ) : req = requests . get ( signature_url ) if req . status_code is 200 : tm = int ( time . time ( ) ) datestamp = datetime . utcfromtimestamp ( tm ) . isoformat ( ) sigfile = "repo-{0}-tmp.sig" . format ( datestamp ) logger . debug ( "writing {0} to {1}" . format ( signature_url , sigfile ) ) with open ( sigfile , 'wb' ) as f : f . write ( req . content ) else : raise RepositoryMissingSignatureError ( signature_url ) verified = self . gpg . verify_data ( sigfile , data ) try : os . remove ( sigfile ) except OSError : pass if verified . valid is True : logger . debug ( "verified {0} against {1}" . format ( data_url , signature_url ) ) else : raise RepositorySignatureError ( data_url , signature_url )
Verify data against it s remote signature
23,158
def verify_file_signature ( self , signature_url , file_url , filename ) : req = requests . get ( signature_url , stream = True ) if req . status_code is 200 : sigfile = req . raw else : raise RepositoryMissingSignatureError ( signature_url ) verified = self . gpg . verify_file ( sigfile , filename ) if verified . valid is True : logger . debug ( "verified {0} against {1}" . format ( filename , signature_url ) ) else : raise RepositorySignatureError ( file_url , signature_url )
Verify a local file against it s remote signature
23,159
def unpack_rsp ( cls , rsp_pb ) : ret_type = rsp_pb . retType ret_msg = rsp_pb . retMsg if ret_type != RET_OK : return RET_ERROR , ret_msg , None res = { } if rsp_pb . HasField ( 's2c' ) : res [ 'server_version' ] = rsp_pb . s2c . serverVer res [ 'login_user_id' ] = rsp_pb . s2c . loginUserID res [ 'conn_id' ] = rsp_pb . s2c . connID res [ 'conn_key' ] = rsp_pb . s2c . connAESKey res [ 'keep_alive_interval' ] = rsp_pb . s2c . keepAliveInterval else : return RET_ERROR , "rsp_pb error" , None return RET_OK , "" , res
Unpack the init connect response
23,160
def unpack_unsubscribe_rsp ( cls , rsp_pb ) : if rsp_pb . retType != RET_OK : return RET_ERROR , rsp_pb . retMsg , None return RET_OK , "" , None
Unpack the un - subscribed response
23,161
def dict2pb ( cls , adict , strict = False ) : obj = cls ( ) for field in obj . DESCRIPTOR . fields : if not field . label == field . LABEL_REQUIRED : continue if not field . has_default_value : continue if not field . name in adict : raise ConvertException ( 'Field "%s" missing from descriptor dictionary.' % field . name ) field_names = set ( [ field . name for field in obj . DESCRIPTOR . fields ] ) if strict : for key in adict . keys ( ) : if key not in field_names : raise ConvertException ( 'Key "%s" can not be mapped to field in %s class.' % ( key , type ( obj ) ) ) for field in obj . DESCRIPTOR . fields : if not field . name in adict : continue msg_type = field . message_type if field . label == FD . LABEL_REPEATED : if field . type == FD . TYPE_MESSAGE : for sub_dict in adict [ field . name ] : item = getattr ( obj , field . name ) . add ( ) item . CopyFrom ( dict2pb ( msg_type . _concrete_class , sub_dict ) ) else : list ( map ( getattr ( obj , field . name ) . append , adict [ field . name ] ) ) else : if field . type == FD . TYPE_MESSAGE : value = dict2pb ( msg_type . _concrete_class , adict [ field . name ] ) getattr ( obj , field . name ) . CopyFrom ( value ) elif field . type in [ FD . TYPE_UINT64 , FD . TYPE_INT64 , FD . TYPE_SINT64 ] : setattr ( obj , field . name , int ( adict [ field . name ] ) ) else : setattr ( obj , field . name , adict [ field . name ] ) return obj
Takes a class representing the ProtoBuf Message and fills it with data from the dict .
23,162
def pb2dict ( obj ) : adict = { } if not obj . IsInitialized ( ) : return None for field in obj . DESCRIPTOR . fields : if not getattr ( obj , field . name ) : continue if not field . label == FD . LABEL_REPEATED : if not field . type == FD . TYPE_MESSAGE : adict [ field . name ] = getattr ( obj , field . name ) else : value = pb2dict ( getattr ( obj , field . name ) ) if value : adict [ field . name ] = value else : if field . type == FD . TYPE_MESSAGE : adict [ field . name ] = [ pb2dict ( v ) for v in getattr ( obj , field . name ) ] else : adict [ field . name ] = [ v for v in getattr ( obj , field . name ) ] return adict
Takes a ProtoBuf Message obj and convertes it to a dict .
23,163
def json2pb ( cls , json , strict = False ) : return dict2pb ( cls , simplejson . loads ( json ) , strict )
Takes a class representing the Protobuf Message and fills it with data from the json string .
23,164
def _split_stock_code ( self , code ) : stock_str = str ( code ) split_loc = stock_str . find ( "." ) if 0 <= split_loc < len ( stock_str ) - 1 and stock_str [ 0 : split_loc ] in MKT_MAP : market_str = stock_str [ 0 : split_loc ] partial_stock_str = stock_str [ split_loc + 1 : ] return RET_OK , ( market_str , partial_stock_str ) else : error_str = ERROR_STR_PREFIX + "format of %s is wrong. (US.AAPL, HK.00700, SZ.000001)" % stock_str return RET_ERROR , error_str
do not use the built - in split function in python . The built - in function cannot handle some stock strings correctly . for instance US .. DJI where the dot . itself is a part of original code
23,165
def position_list_query ( self , code = '' , pl_ratio_min = None , pl_ratio_max = None , trd_env = TrdEnv . REAL , acc_id = 0 , acc_index = 0 ) : ret , msg = self . _check_trd_env ( trd_env ) if ret != RET_OK : return ret , msg ret , msg , acc_id = self . _check_acc_id_and_acc_index ( trd_env , acc_id , acc_index ) if ret != RET_OK : return ret , msg ret , msg , stock_code = self . _check_stock_code ( code ) if ret != RET_OK : return ret , msg query_processor = self . _get_sync_query_processor ( PositionListQuery . pack_req , PositionListQuery . unpack_rsp ) kargs = { 'code' : str ( stock_code ) , 'pl_ratio_min' : pl_ratio_min , 'pl_ratio_max' : pl_ratio_max , 'trd_mkt' : self . __trd_mkt , 'trd_env' : trd_env , 'acc_id' : acc_id , 'conn_id' : self . get_sync_conn_id ( ) } ret_code , msg , position_list = query_processor ( ** kargs ) if ret_code != RET_OK : return RET_ERROR , msg col_list = [ "code" , "stock_name" , "qty" , "can_sell_qty" , "cost_price" , "cost_price_valid" , "market_val" , "nominal_price" , "pl_ratio" , "pl_ratio_valid" , "pl_val" , "pl_val_valid" , "today_buy_qty" , "today_buy_val" , "today_pl_val" , "today_sell_qty" , "today_sell_val" , "position_side" ] position_list_table = pd . DataFrame ( position_list , columns = col_list ) return RET_OK , position_list_table
for querying the position list
23,166
def deal_list_query ( self , code = "" , trd_env = TrdEnv . REAL , acc_id = 0 , acc_index = 0 ) : ret , msg = self . _check_trd_env ( trd_env ) if ret != RET_OK : return ret , msg ret , msg , acc_id = self . _check_acc_id_and_acc_index ( trd_env , acc_id , acc_index ) if ret != RET_OK : return ret , msg ret , msg , stock_code = self . _check_stock_code ( code ) if ret != RET_OK : return ret , msg query_processor = self . _get_sync_query_processor ( DealListQuery . pack_req , DealListQuery . unpack_rsp ) kargs = { 'code' : stock_code , 'trd_mkt' : self . __trd_mkt , 'trd_env' : trd_env , 'acc_id' : acc_id , 'conn_id' : self . get_sync_conn_id ( ) } ret_code , msg , deal_list = query_processor ( ** kargs ) if ret_code != RET_OK : return RET_ERROR , msg col_list = [ "code" , "stock_name" , "deal_id" , "order_id" , "qty" , "price" , "trd_side" , "create_time" , "counter_broker_id" , "counter_broker_name" ] deal_list_table = pd . DataFrame ( deal_list , columns = col_list ) return RET_OK , deal_list_table
for querying deal list
23,167
def is_sock_ok ( self , timeout_select ) : self . _socket_lock . acquire ( ) try : ret = self . _is_socket_ok ( timeout_select ) finally : self . _socket_lock . release ( ) return ret
check if socket is OK
23,168
def check_date_str_format ( s , default_time = "00:00:00" ) : try : str_fmt = s if ":" not in s : str_fmt = '{} {}' . format ( s , default_time ) dt_obj = datetime . strptime ( str_fmt , "%Y-%m-%d %H:%M:%S" ) return RET_OK , dt_obj except ValueError : error_str = ERROR_STR_PREFIX + "wrong time or time format" return RET_ERROR , error_str
Check the format of date string
23,169
def normalize_date_format ( date_str , default_time = "00:00:00" ) : ret_code , ret_data = check_date_str_format ( date_str , default_time ) if ret_code != RET_OK : return ret_code , ret_data return RET_OK , ret_data . strftime ( "%Y-%m-%d %H:%M:%S" )
normalize the format of data
23,170
def extract_pls_rsp ( rsp_str ) : try : rsp = json . loads ( rsp_str ) except ValueError : traceback . print_exc ( ) err = sys . exc_info ( ) [ 1 ] err_str = ERROR_STR_PREFIX + str ( err ) return RET_ERROR , err_str , None error_code = int ( rsp [ 'retType' ] ) if error_code != 1 : error_str = ERROR_STR_PREFIX + rsp [ 'retMsg' ] return RET_ERROR , error_str , None return RET_OK , "" , rsp
Extract the response of PLS
23,171
def split_stock_str ( stock_str_param ) : stock_str = str ( stock_str_param ) split_loc = stock_str . find ( "." ) if 0 <= split_loc < len ( stock_str ) - 1 and stock_str [ 0 : split_loc ] in MKT_MAP : market_str = stock_str [ 0 : split_loc ] market_code = MKT_MAP [ market_str ] partial_stock_str = stock_str [ split_loc + 1 : ] return RET_OK , ( market_code , partial_stock_str ) else : error_str = ERROR_STR_PREFIX + "format of %s is wrong. (US.AAPL, HK.00700, SZ.000001)" % stock_str return RET_ERROR , error_str
split the stock string
23,172
def set_pre_handler ( self , handler ) : with self . _lock : if self . _handler_ctx is not None : return self . _handler_ctx . set_pre_handler ( handler ) return RET_ERROR
set pre handler
23,173
def msearch ( self , m , query , fields = None , limit = None , or_ = True ) : ix = self . _index ( m ) if fields is None : fields = ix . fields group = OrGroup if or_ else AndGroup parser = MultifieldParser ( fields , ix . schema , group = group ) return ix . search ( parser . parse ( query ) , limit = limit )
set limit make search faster
23,174
def update ( self , ** kwargs ) : "Update document not update index." kw = dict ( index = self . name , doc_type = self . doc_type , ignore = [ 404 ] ) kw . update ( ** kwargs ) return self . _client . update ( ** kw )
Update document not update index .
23,175
def diagonalize_collision_matrix ( collision_matrices , i_sigma = None , i_temp = None , pinv_solver = 0 , log_level = 0 ) : start = time . time ( ) shape = collision_matrices . shape if len ( shape ) == 6 : size = shape [ 2 ] * shape [ 3 ] assert size == shape [ 4 ] * shape [ 5 ] elif len ( shape ) == 8 : size = np . prod ( shape [ 2 : 5 ] ) assert size == np . prod ( shape [ 5 : 8 ] ) elif len ( shape ) == 2 : size = shape [ 0 ] assert size == shape [ 1 ] solver = _select_solver ( pinv_solver ) if solver in [ 1 , 2 ] : if log_level : routine = [ 'dsyev' , 'dsyevd' ] [ solver - 1 ] sys . stdout . write ( "Diagonalizing by lapacke %s... " % routine ) sys . stdout . flush ( ) import phono3py . _phono3py as phono3c w = np . zeros ( size , dtype = 'double' ) if i_sigma is None : _i_sigma = 0 else : _i_sigma = i_sigma if i_temp is None : _i_temp = 0 else : _i_temp = i_temp phono3c . diagonalize_collision_matrix ( collision_matrices , w , _i_sigma , _i_temp , 0.0 , ( solver + 1 ) % 2 , 0 ) elif solver == 3 : if log_level : sys . stdout . write ( "Diagonalizing by np.linalg.eigh... " ) sys . stdout . flush ( ) col_mat = collision_matrices [ i_sigma , i_temp ] . reshape ( size , size ) w , col_mat [ : ] = np . linalg . eigh ( col_mat ) elif solver == 4 : if log_level : sys . stdout . write ( "Diagonalizing by " "scipy.linalg.lapack.dsyev... " ) sys . stdout . flush ( ) import scipy . linalg col_mat = collision_matrices [ i_sigma , i_temp ] . reshape ( size , size ) w , _ , info = scipy . linalg . lapack . dsyev ( col_mat . T , overwrite_a = 1 ) elif solver == 5 : if log_level : sys . stdout . write ( "Diagonalizing by " "scipy.linalg.lapack.dsyevd... " ) sys . stdout . flush ( ) import scipy . linalg col_mat = collision_matrices [ i_sigma , i_temp ] . reshape ( size , size ) w , _ , info = scipy . linalg . lapack . dsyevd ( col_mat . T , overwrite_a = 1 ) if log_level : print ( "[%.3fs]" % ( time . time ( ) - start ) ) sys . stdout . flush ( ) return w
Diagonalize collision matrices .
23,176
def _get_weights ( self ) : weights = [ ] n = float ( self . _rot_grid_points . shape [ 1 ] ) for r_gps in self . _rot_grid_points : weights . append ( np . sqrt ( len ( np . unique ( r_gps ) ) / n ) ) return weights
Returns weights used for collision matrix and |X > and |f >
23,177
def _get_I ( self , a , b , size , plus_transpose = True ) : r_sum = np . zeros ( ( 3 , 3 ) , dtype = 'double' , order = 'C' ) for r in self . _rotations_cartesian : for i in range ( 3 ) : for j in range ( 3 ) : r_sum [ i , j ] += r [ a , i ] * r [ b , j ] if plus_transpose : r_sum += r_sum . T if ( np . abs ( r_sum ) < 1e-10 ) . all ( ) : return None I_mat = np . zeros ( ( 3 * size , 3 * size ) , dtype = 'double' , order = 'C' ) for i in range ( size ) : I_mat [ ( i * 3 ) : ( ( i + 1 ) * 3 ) , ( i * 3 ) : ( ( i + 1 ) * 3 ) ] = r_sum return I_mat
Return I matrix in Chaput s PRL paper .
23,178
def _set_mode_kappa_Chaput ( self , i_sigma , i_temp , weights ) : X = self . _get_X ( i_temp , weights , self . _gv ) . ravel ( ) num_ir_grid_points = len ( self . _ir_grid_points ) num_band = self . _primitive . get_number_of_atoms ( ) * 3 size = num_ir_grid_points * num_band * 3 v = self . _collision_matrix [ i_sigma , i_temp ] . reshape ( size , size ) solver = _select_solver ( self . _pinv_solver ) if solver in [ 1 , 2 , 4 , 5 ] : v = v . T e = self . _get_eigvals_pinv ( i_sigma , i_temp ) t = self . _temperatures [ i_temp ] omega_inv = np . empty ( v . shape , dtype = 'double' , order = 'C' ) np . dot ( v , ( e * v ) . T , out = omega_inv ) Y = np . dot ( omega_inv , X ) self . _set_f_vectors ( Y , num_ir_grid_points , weights ) elems = ( ( 0 , 0 ) , ( 1 , 1 ) , ( 2 , 2 ) , ( 1 , 2 ) , ( 0 , 2 ) , ( 0 , 1 ) ) for i , vxf in enumerate ( elems ) : mat = self . _get_I ( vxf [ 0 ] , vxf [ 1 ] , num_ir_grid_points * num_band ) self . _mode_kappa [ i_sigma , i_temp , : , : , i ] = 0 if mat is not None : np . dot ( mat , omega_inv , out = mat ) w = diagonalize_collision_matrix ( mat , pinv_solver = self . _pinv_solver , log_level = self . _log_level ) if solver in [ 1 , 2 , 4 , 5 ] : mat = mat . T spectra = np . dot ( mat . T , X ) ** 2 * w for s , eigvec in zip ( spectra , mat . T ) : vals = s * ( eigvec ** 2 ) . reshape ( - 1 , 3 ) . sum ( axis = 1 ) vals = vals . reshape ( num_ir_grid_points , num_band ) self . _mode_kappa [ i_sigma , i_temp , : , : , i ] += vals factor = self . _conversion_factor * Kb * t ** 2 self . _mode_kappa [ i_sigma , i_temp ] *= factor
Calculate mode kappa by the way in Laurent Chaput s PRL paper .
23,179
def get_third_order_displacements ( cell , symmetry , is_plusminus = 'auto' , is_diagonal = False ) : positions = cell . get_scaled_positions ( ) lattice = cell . get_cell ( ) . T disps_first = get_least_displacements ( symmetry , is_plusminus = is_plusminus , is_diagonal = False ) symprec = symmetry . get_symmetry_tolerance ( ) dds = [ ] for disp in disps_first : atom1 = disp [ 0 ] disp1 = disp [ 1 : 4 ] site_sym = symmetry . get_site_symmetry ( atom1 ) dds_atom1 = { 'number' : atom1 , 'direction' : disp1 , 'second_atoms' : [ ] } reduced_site_sym = get_reduced_site_symmetry ( site_sym , disp1 , symprec ) second_atoms = get_least_orbits ( atom1 , cell , reduced_site_sym , symprec ) for atom2 in second_atoms : dds_atom2 = get_next_displacements ( atom1 , atom2 , reduced_site_sym , lattice , positions , symprec , is_diagonal ) min_vec = get_equivalent_smallest_vectors ( atom1 , atom2 , cell , symprec ) [ 0 ] min_distance = np . linalg . norm ( np . dot ( lattice , min_vec ) ) dds_atom2 [ 'distance' ] = min_distance dds_atom1 [ 'second_atoms' ] . append ( dds_atom2 ) dds . append ( dds_atom1 ) return dds
Create dispalcement dataset
23,180
def get_bond_symmetry ( site_symmetry , lattice , positions , atom_center , atom_disp , symprec = 1e-5 ) : bond_sym = [ ] pos = positions for rot in site_symmetry : rot_pos = ( np . dot ( pos [ atom_disp ] - pos [ atom_center ] , rot . T ) + pos [ atom_center ] ) diff = pos [ atom_disp ] - rot_pos diff -= np . rint ( diff ) dist = np . linalg . norm ( np . dot ( lattice , diff ) ) if dist < symprec : bond_sym . append ( rot ) return np . array ( bond_sym )
Bond symmetry is the symmetry operations that keep the symmetry of the cell containing two fixed atoms .
23,181
def get_least_orbits ( atom_index , cell , site_symmetry , symprec = 1e-5 ) : orbits = _get_orbits ( atom_index , cell , site_symmetry , symprec ) mapping = np . arange ( cell . get_number_of_atoms ( ) ) for i , orb in enumerate ( orbits ) : for num in np . unique ( orb ) : if mapping [ num ] > mapping [ i ] : mapping [ num ] = mapping [ i ] return np . unique ( mapping )
Find least orbits for a centering atom
23,182
def write_fc3_to_hdf5 ( fc3 , filename = 'fc3.hdf5' , p2s_map = None , compression = None ) : with h5py . File ( filename , 'w' ) as w : w . create_dataset ( 'fc3' , data = fc3 , compression = compression ) if p2s_map is not None : w . create_dataset ( 'p2s_map' , data = p2s_map )
Write third - order force constants in hdf5 format .
23,183
def write_unitary_matrix_to_hdf5 ( temperature , mesh , unitary_matrix = None , sigma = None , sigma_cutoff = None , solver = None , filename = None , verbose = False ) : suffix = _get_filename_suffix ( mesh , sigma = sigma , sigma_cutoff = sigma_cutoff , filename = filename ) hdf5_filename = "unitary" + suffix + ".hdf5" with h5py . File ( hdf5_filename , 'w' ) as w : w . create_dataset ( 'temperature' , data = temperature ) if unitary_matrix is not None : w . create_dataset ( 'unitary_matrix' , data = unitary_matrix ) if solver is not None : w . create_dataset ( 'solver' , data = solver ) if verbose : if len ( temperature ) > 1 : text = "Unitary matrices " else : text = "Unitary matrix " if sigma is not None : text += "at sigma %s " % _del_zeros ( sigma ) if sigma_cutoff is not None : text += "(%4.2f SD) " % sigma_cutoff if len ( temperature ) > 1 : text += "were written into " else : text += "was written into " if sigma is not None : text += "\n" text += "\"%s\"." % hdf5_filename print ( text )
Write eigenvectors of collision matrices at temperatures .
23,184
def get_frequency_shift ( self , grid_points , temperatures = np . arange ( 0 , 1001 , 10 , dtype = 'double' ) , epsilons = None , output_filename = None ) : if self . _interaction is None : self . set_phph_interaction ( ) if epsilons is None : _epsilons = [ 0.1 ] else : _epsilons = epsilons self . _grid_points = grid_points get_frequency_shift ( self . _interaction , self . _grid_points , self . _band_indices , _epsilons , temperatures , output_filename = output_filename , log_level = self . _log_level )
Frequency shift from lowest order diagram is calculated .
23,185
def _get_triplets_reciprocal_mesh_at_q ( fixed_grid_number , mesh , rotations , is_time_reversal = True , swappable = True ) : import phono3py . _phono3py as phono3c map_triplets = np . zeros ( np . prod ( mesh ) , dtype = 'uintp' ) map_q = np . zeros ( np . prod ( mesh ) , dtype = 'uintp' ) grid_address = np . zeros ( ( np . prod ( mesh ) , 3 ) , dtype = 'intc' ) phono3c . triplets_reciprocal_mesh_at_q ( map_triplets , map_q , grid_address , fixed_grid_number , np . array ( mesh , dtype = 'intc' ) , is_time_reversal * 1 , np . array ( rotations , dtype = 'intc' , order = 'C' ) , swappable * 1 ) return map_triplets , map_q , grid_address
Search symmetry reduced triplets fixing one q - point
23,186
def get_averaged_interaction ( self ) : v = self . _interaction_strength w = self . _weights_at_q v_sum = np . dot ( w , v . sum ( axis = 2 ) . sum ( axis = 2 ) ) return v_sum / np . prod ( v . shape [ 2 : ] )
Return sum over phonon triplets of interaction strength
23,187
def optimize ( lattice , positions , numbers , displacements , forces , alm_options = None , p2s_map = None , p2p_map = None , log_level = 0 ) : from alm import ALM with ALM ( lattice , positions , numbers ) as alm : natom = len ( numbers ) alm . set_verbosity ( log_level ) nkd = len ( np . unique ( numbers ) ) if 'cutoff_distance' not in alm_options : rcs = - np . ones ( ( 2 , nkd , nkd ) , dtype = 'double' ) elif type ( alm_options [ 'cutoff_distance' ] ) is float : rcs = np . ones ( ( 2 , nkd , nkd ) , dtype = 'double' ) rcs [ 0 ] *= - 1 rcs [ 1 ] *= alm_options [ 'cutoff_distance' ] alm . define ( 2 , rcs ) alm . set_displacement_and_force ( displacements , forces ) if 'solver' in alm_options : solver = alm_options [ 'solver' ] else : solver = 'SimplicialLDLT' info = alm . optimize ( solver = solver ) fc2 = extract_fc2_from_alm ( alm , natom , atom_list = p2s_map , p2s_map = p2s_map , p2p_map = p2p_map ) fc3 = _extract_fc3_from_alm ( alm , natom , p2s_map = p2s_map , p2p_map = p2p_map ) return fc2 , fc3
Calculate force constants
23,188
def _get_alm_disp_fc3 ( disp_dataset ) : natom = disp_dataset [ 'natom' ] ndisp = len ( disp_dataset [ 'first_atoms' ] ) for disp1 in disp_dataset [ 'first_atoms' ] : ndisp += len ( disp1 [ 'second_atoms' ] ) disp = np . zeros ( ( ndisp , natom , 3 ) , dtype = 'double' , order = 'C' ) indices = [ ] count = 0 for disp1 in disp_dataset [ 'first_atoms' ] : indices . append ( count ) disp [ count , disp1 [ 'number' ] ] = disp1 [ 'displacement' ] count += 1 for disp1 in disp_dataset [ 'first_atoms' ] : for disp2 in disp1 [ 'second_atoms' ] : if 'included' in disp2 : if disp2 [ 'included' ] : indices . append ( count ) else : indices . append ( count ) disp [ count , disp1 [ 'number' ] ] = disp1 [ 'displacement' ] disp [ count , disp2 [ 'number' ] ] = disp2 [ 'displacement' ] count += 1 return disp , indices
Create displacements of atoms for ALM input
23,189
def capture_dash_in_url_name ( self , node ) : for keyword in node . keywords : if keyword . arg == 'name' and '-' in keyword . value . s : return DJ04 ( lineno = node . lineno , col = node . col_offset , )
Capture dash in URL name
23,190
def capture_url_missing_namespace ( self , node ) : for arg in node . args : if not ( isinstance ( arg , ast . Call ) and isinstance ( arg . func , ast . Name ) ) : continue if arg . func . id != 'include' : continue for keyword in arg . keywords : if keyword . arg == 'namespace' : return return DJ05 ( lineno = node . lineno , col = node . col_offset , )
Capture missing namespace in url include .
23,191
def get_call_name ( self , node ) : if isinstance ( node . func , ast . Attribute ) : return node . func . attr elif isinstance ( node . func , ast . Name ) : return node . func . id
Return call name for the given node .
23,192
def message ( self ) : message = self . description . format ( ** self . parameters ) return '{code} {message}' . format ( code = self . code , message = message )
Return issue message .
23,193
def run ( self , node ) : if not self . checker_applies ( node ) : return issues = [ ] for body in node . body : if not isinstance ( body , ast . ClassDef ) : continue for element in body . body : if not isinstance ( element , ast . Assign ) : continue for target in element . targets : if target . id == 'fields' and self . is_string_dunder_all ( element ) : issues . append ( DJ07 ( lineno = node . lineno , col = node . col_offset , ) ) elif target . id == 'exclude' : issues . append ( DJ06 ( lineno = node . lineno , col = node . col_offset , ) ) return issues
Captures the use of exclude in ModelForm Meta
23,194
def detect_scheme_and_format ( source ) : if hasattr ( source , 'read' ) : return ( 'stream' , None ) if not isinstance ( source , six . string_types ) : return ( None , 'inline' ) if 'docs.google.com/spreadsheets' in source : if 'export' not in source and 'pub' not in source : return ( None , 'gsheet' ) elif 'csv' in source : return ( 'https' , 'csv' ) for sql_scheme in config . SQL_SCHEMES : if source . startswith ( '%s://' % sql_scheme ) : return ( None , 'sql' ) parsed = urlparse ( source ) scheme = parsed . scheme . lower ( ) if len ( scheme ) < 2 : scheme = config . DEFAULT_SCHEME format = os . path . splitext ( parsed . path or parsed . netloc ) [ 1 ] [ 1 : ] . lower ( ) or None if format is None : query_string = parse_qs ( parsed . query ) query_string_format = query_string . get ( "format" ) if query_string_format is not None and len ( query_string_format ) == 1 : format = query_string_format [ 0 ] if parsed . path . endswith ( 'datapackage.json' ) : return ( None , 'datapackage' ) return ( scheme , format )
Detect scheme and format based on source and return as a tuple .
23,195
def detect_encoding ( sample , encoding = None ) : from cchardet import detect if encoding is not None : return normalize_encoding ( sample , encoding ) result = detect ( sample ) confidence = result [ 'confidence' ] or 0 encoding = result [ 'encoding' ] or 'ascii' encoding = normalize_encoding ( sample , encoding ) if confidence < config . ENCODING_CONFIDENCE : encoding = config . DEFAULT_ENCODING if encoding == 'ascii' : encoding = config . DEFAULT_ENCODING return encoding
Detect encoding of a byte string sample .
23,196
def normalize_encoding ( sample , encoding ) : encoding = codecs . lookup ( encoding ) . name if encoding == 'utf-8' : if sample . startswith ( codecs . BOM_UTF8 ) : encoding = 'utf-8-sig' elif encoding == 'utf-16-be' : if sample . startswith ( codecs . BOM_UTF16_BE ) : encoding = 'utf-16' elif encoding == 'utf-16-le' : if sample . startswith ( codecs . BOM_UTF16_LE ) : encoding = 'utf-16' return encoding
Normalize encoding including utf - 8 - sig utf - 16 - be utf - 16 - le tweaks .
23,197
def detect_html ( text ) : pattern = re . compile ( '\\s*<(!doctype|html)' , re . IGNORECASE ) return bool ( pattern . match ( text ) )
Detect if text is HTML .
23,198
def reset_stream ( stream ) : try : position = stream . tell ( ) except Exception : position = True if position != 0 : try : stream . seek ( 0 ) except Exception : message = 'It\'s not possible to reset this stream' raise exceptions . TabulatorException ( message )
Reset stream pointer to the first element .
23,199
def requote_uri ( uri ) : import requests . utils if six . PY2 : def url_encode_non_ascii ( bytes ) : pattern = '[\x80-\xFF]' replace = lambda c : ( '%%%02x' % ord ( c . group ( 0 ) ) ) . upper ( ) return re . sub ( pattern , replace , bytes ) parts = urlparse ( uri ) uri = urlunparse ( part . encode ( 'idna' ) if index == 1 else url_encode_non_ascii ( part . encode ( 'utf-8' ) ) for index , part in enumerate ( parts ) ) return requests . utils . requote_uri ( uri )
Requote uri if it contains non - ascii chars spaces etc .