idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
243,500
def timing(self, stat, delta, rate=1):
    """Send new timing information.

    `delta` is either a number of milliseconds or a `timedelta`.
    """
    value = delta
    if isinstance(value, timedelta):
        # Convert timedelta to number of milliseconds.
        value = value.total_seconds() * 1000.
    self._send_stat(stat, '%0.6f|ms' % value, rate)
Send new timing information .
68
5
243,501
def decr(self, stat, count=1, rate=1):
    """Decrement a stat by `count` (delegates to incr with a negated count)."""
    negated = -count
    self.incr(stat, negated, rate)
Decrement a stat by count .
29
7
243,502
def gauge(self, stat, value, rate=1, delta=False):
    """Set a gauge value."""
    if delta or value >= 0:
        # Plain set, or a relative change ("+x|g" / "-x|g").
        sign = '+' if delta and value >= 0 else ''
        self._send_stat(stat, '%s%s|g' % (sign, value), rate)
        return
    # Setting an absolute negative gauge: statsd would interpret "-x|g" as a
    # decrement, so reset to zero first and send both stats as one pipeline.
    if rate < 1 and random.random() > rate:
        return
    with self.pipeline() as pipe:
        pipe._send_stat(stat, '0|g', 1)
        pipe._send_stat(stat, '%s|g' % value, 1)
Set a gauge value .
128
5
243,503
def set(self, stat, value, rate=1):
    """Set a set value (statsd "set" metric: counts unique values)."""
    payload = '%s|s' % value
    self._send_stat(stat, payload, rate)
Set a set value .
34
5
243,504
def safe_wraps(wrapper, *args, **kwargs):
    """Safely wrap partial functions: unwrap any functools.partial layers
    before delegating to functools.wraps."""
    target = wrapper
    while isinstance(target, functools.partial):
        target = target.func
    return functools.wraps(target, *args, **kwargs)
Safely wraps partial functions .
54
6
243,505
def find_rule_classes(extra_path):
    """Searches a given file or directory for rule classes.

    This is done by adding the directory to the python path, importing the
    python modules found there and then collecting any LineRule/CommitRule
    subclass defined in those modules.
    Raises UserRuleError for an invalid path, import failures, or (via
    assert_valid_rule_class) invalid rule classes.
    """
    files = []
    modules = []
    # A file path yields a single candidate module; a directory yields all its entries.
    if os.path.isfile(extra_path):
        files = [os.path.basename(extra_path)]
        directory = os.path.dirname(extra_path)
    elif os.path.isdir(extra_path):
        files = os.listdir(extra_path)
        directory = extra_path
    else:
        raise UserRuleError(u"Invalid extra-path: {0}".format(extra_path))

    # Filter out files that are not python modules
    for filename in files:
        if fnmatch.fnmatch(filename, '*.py'):
            modules.append(os.path.splitext(filename)[0])

    # No need to continue if there are no modules specified
    if not modules:
        return []

    # Append the extra rules path to python path so that we can import them
    sys.path.append(directory)

    # Find all the rule classes in the found python files
    rule_classes = []
    for module in modules:
        # Import the module
        try:
            importlib.import_module(module)
        except Exception as e:
            raise UserRuleError(u"Error while importing extra-path module '{0}': {1}".format(module, ustr(e)))

        # Find all rule classes in the module. We do this my inspecting all members of the module and checking
        # 1) is it a class, if not, skip
        # 2) is the parent path the current module. If not, we are dealing with an imported class, skip
        # 3) is it a subclass of rule
        rule_classes.extend([clazz for _, clazz in inspect.getmembers(sys.modules[module])
                             if
                             inspect.isclass(clazz) and  # check isclass to ensure clazz.__module__ exists
                             clazz.__module__ == module and  # ignore imported classes
                             (issubclass(clazz, rules.LineRule) or issubclass(clazz, rules.CommitRule))])

    # validate that the rule classes are valid user-defined rules
    for rule_class in rule_classes:
        assert_valid_rule_class(rule_class)

    return rule_classes
Searches a given directory or python module for rule classes . This is done by adding the directory path to the python path importing the modules and then finding any Rule class in those modules .
475
38
243,506
def ustr(obj):
    """Python 2/3 utility: coerce obj to `unicode` on python 2 and to `str`
    on python 3, decoding raw byte strings with DEFAULT_ENCODING."""
    if sys.version_info[0] == 2:
        # Explicitly decode raw strings; everything else goes through unicode()
        if type(obj) in [str, basestring]:  # pragma: no cover # noqa
            return unicode(obj, DEFAULT_ENCODING)  # pragma: no cover # noqa
        return unicode(obj)  # pragma: no cover # noqa
    if type(obj) in [bytes]:
        return obj.decode(DEFAULT_ENCODING)
    return str(obj)
Python 2 and 3 utility method that converts an obj to unicode in python 2 and to a str object in python 3
137
24
243,507
def get_rule_option(self, rule_name_or_id, option_name):
    """Return the value of a given option for a given rule.

    A LintConfigError will be raised (by _get_option) if the rule or option
    doesn't exist.
    """
    return self._get_option(rule_name_or_id, option_name).value
Returns the value of a given option for a given rule. A LintConfigError will be raised if the rule or option don't exist.
47
29
243,508
def set_rule_option(self, rule_name_or_id, option_name, option_value):
    """Attempt to set a given option for a given rule to a given value.

    Raises LintConfigError if the rule or option doesn't exist or if the
    value is invalid.
    """
    option = self._get_option(rule_name_or_id, option_name)
    try:
        option.set(option_value)
    except options.RuleOptionError as e:
        template = u"'{0}' is not a valid value for option '{1}.{2}'. {3}."
        raise LintConfigError(template.format(option_value, rule_name_or_id, option_name, ustr(e)))
Attempts to set a given value for a given option for a given rule. A LintConfigError will be raised if the rule or option don't exist or if the value is invalid.
126
38
243,509
def set_from_config_file(self, filename):
    """Load lint config from an ini-style config file."""
    if not os.path.exists(filename):
        raise LintConfigError(u"Invalid file path: {0}".format(filename))
    self._config_path = os.path.abspath(filename)
    try:
        config_parser = ConfigParser()
        config_parser.read(filename)
        for section_name in config_parser.sections():
            for option_name, option_value in config_parser.items(section_name):
                self.set_option(section_name, option_name, ustr(option_value))
    except ConfigParserError as e:
        # Re-raise parse errors as lint config errors
        raise LintConfigError(ustr(e))
Loads lint config from a ini - style config file
146
13
243,510
def build(self, config=None):
    """Build a real LintConfig object by normalizing and validating the
    options that were previously set on this factory.

    If a config object is passed, it is rebuilt in-place instead of creating
    a fresh LintConfig.
    """
    if not config:
        config = LintConfig()
    config._config_path = self._config_path

    # General options go first: they may change the behavior or validity of
    # the other options.
    general_section = self._config_blueprint.get('general')
    if general_section:
        for option_name, option_value in general_section.items():
            config.set_general_option(option_name, option_value)

    for section_name, section_dict in self._config_blueprint.items():
        if section_name == "general":
            continue  # already handled above
        for option_name, option_value in section_dict.items():
            config.set_rule_option(section_name, option_name, option_value)

    return config
Build a real LintConfig object by normalizing and validating the options that were previously set on this factory .
215
23
243,511
def clone(self):
    """Create an exact, independent copy of this LintConfigBuilder."""
    duplicate = LintConfigBuilder()
    duplicate._config_blueprint = copy.deepcopy(self._config_blueprint)
    duplicate._config_path = self._config_path
    return duplicate
Creates an exact copy of a LintConfigBuilder .
50
12
243,512
def _git(*command_parts, **kwargs):
    """Convenience function for running git commands.

    Automatically deals with exceptions and unicode:
    - git not installed -> GitNotInstalledError
    - non-zero exit (sh raises by default) -> GitContextError with a friendly message
    """
    # Special arguments passed to sh: http://amoffat.github.io/sh/special_arguments.html
    git_kwargs = {'_tty_out': False}
    git_kwargs.update(kwargs)
    try:
        result = sh.git(*command_parts, **git_kwargs)  # pylint: disable=unexpected-keyword-arg
        # If we reach this point and the result has an exit_code that is larger than 0, this means that we didn't
        # get an exception (which is the default sh behavior for non-zero exit codes) and so the user is expecting
        # a non-zero exit code -> just return the entire result
        if hasattr(result, 'exit_code') and result.exit_code > 0:
            return result
        return ustr(result)
    except CommandNotFound:
        raise GitNotInstalledError()
    except ErrorReturnCode as e:
        # Something went wrong while executing the git command
        error_msg = e.stderr.strip()
        # NOTE: stderr from sh is bytes, hence the b"..." comparison below.
        if '_cwd' in git_kwargs and b"not a git repository" in error_msg.lower():
            error_msg = u"{0} is not a git repository.".format(git_kwargs['_cwd'])
        else:
            error_msg = u"An error occurred while executing '{0}': {1}".format(e.full_cmd, error_msg)
        raise GitContextError(error_msg)
Convenience function for running git commands . Automatically deals with exceptions and unicode .
336
18
243,513
def git_commentchar():
    """Shortcut for retrieving the comment char from git config.

    Falls back to '#' when the config value is unset.
    """
    commentchar = _git("config", "--get", "core.commentchar", _ok_code=[1])
    # git exits with code 1 if it can't find a config value; fall back to '#'.
    if hasattr(commentchar, 'exit_code') and commentchar.exit_code == 1:  # pylint: disable=no-member
        commentchar = "#"
    return ustr(commentchar).replace(u"\n", u"")
Shortcut for retrieving comment char from git config
126
9
243,514
def from_full_message(commit_msg_str):
    """Parse a full git commit message string into its constituent parts
    (original, full, title, body), dropping comment lines and anything
    after the cutline."""
    all_lines = commit_msg_str.splitlines()
    try:
        cutline_index = all_lines.index(GitCommitMessage.CUTLINE)
    except ValueError:
        cutline_index = None  # no cutline -> keep everything
    content_lines = [line for line in all_lines[:cutline_index]
                     if not line.startswith(GitCommitMessage.COMMENT_CHAR)]
    full = "\n".join(content_lines)
    title = content_lines[0] if content_lines else ""
    body = content_lines[1:] if len(content_lines) > 1 else []
    return GitCommitMessage(original=commit_msg_str, full=full, title=title, body=body)
Parses a full git commit message by parsing a given string into the different parts of a commit message
159
21
243,515
def should_ignore_rule(self, rule):
    """Determine whether a rule should be ignored: its id or name appears in
    the configured ignore list."""
    ignored = self.config.ignore
    return rule.id in ignored or rule.name in ignored
Determines whether a rule should be ignored based on the general list of rules to ignore
32
18
243,516
def _apply_line_rules ( lines , commit , rules , line_nr_start ) : all_violations = [ ] line_nr = line_nr_start for line in lines : for rule in rules : violations = rule . validate ( line , commit ) if violations : for violation in violations : violation . line_nr = line_nr all_violations . append ( violation ) line_nr += 1 return all_violations
Iterates over the lines in a given list of lines and validates a given list of rules against each line
93
22
243,517
def _apply_commit_rules ( rules , commit ) : all_violations = [ ] for rule in rules : violations = rule . validate ( commit ) if violations : all_violations . extend ( violations ) return all_violations
Applies a set of rules against a given commit and gitcontext
50
13
243,518
def lint(self, commit):
    """Lint the given commit by applying all configuration, title, body and
    commit rules; returns the sorted list of violations."""
    LOG.debug("Linting commit %s", commit.sha or "[SHA UNKNOWN]")
    LOG.debug("Commit Object\n" + ustr(commit))

    # Apply config rules
    for rule in self.configuration_rules:
        rule.apply(self.config, commit)

    # Skip linting if this is a special commit type that is configured to be ignored
    ignore_commit_types = ["merge", "squash", "fixup"]
    for commit_type in ignore_commit_types:
        # e.g. commit.is_merge_commit combined with config.ignore_merge_commits
        if getattr(commit, "is_{0}_commit".format(commit_type)) and \
                getattr(self.config, "ignore_{0}_commits".format(commit_type)):
            return []

    violations = []
    # determine violations by applying all rules
    violations.extend(self._apply_line_rules([commit.message.title], commit, self.title_line_rules, 1))
    violations.extend(self._apply_line_rules(commit.message.body, commit, self.body_line_rules, 2))
    violations.extend(self._apply_commit_rules(self.commit_rules, commit))

    # Sort violations by line number and rule_id. If there's no line nr specified (=common certain commit rules),
    # we replace None with -1 so that it always get's placed first. Note that we need this to do this to support
    # python 3, as None is not allowed in a list that is being sorted.
    violations.sort(key=lambda v: (-1 if v.line_nr is None else v.line_nr, v.rule_id))
    return violations
Lint the last commit in a given git context by applying all ignore title body and commit rules .
379
20
243,519
def print_violations(self, violations):
    """Print a given set of violations to the standard error output, with
    increasing detail at higher verbosity levels (e < ee < eee)."""
    for violation in violations:
        line_nr = violation.line_nr if violation.line_nr else "-"
        self.display.e(u"{0}: {1}".format(line_nr, violation.rule_id), exact=True)
        self.display.ee(u"{0}: {1} {2}".format(line_nr, violation.rule_id, violation.message), exact=True)
        if violation.content:
            self.display.eee(u"{0}: {1} {2}: \"{3}\"".format(line_nr, violation.rule_id,
                                                             violation.message, violation.content), exact=True)
        else:
            self.display.eee(u"{0}: {1} {2}".format(line_nr, violation.rule_id, violation.message), exact=True)
Print a given set of violations to the standard error output
203
11
243,520
def _output ( self , message , verbosity , exact , stream ) : if exact : if self . config . verbosity == verbosity : stream . write ( message + "\n" ) else : if self . config . verbosity >= verbosity : stream . write ( message + "\n" )
Output a message if the config s verbosity is > = to the given verbosity . If exact == True the message will only be outputted if the given verbosity exactly matches the config s verbosity .
63
42
243,521
def setup_logging():
    """Setup gitlint logging: a dedicated 'gitlint' root logger with its own
    stream handler, defaulting to ERROR level."""
    root_log = logging.getLogger("gitlint")
    # Don't propagate to ancestor loggers; the gitlint root logger handles everything
    root_log.propagate = False
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(logging.Formatter(LOG_FORMAT))
    root_log.addHandler(stream_handler)
    root_log.setLevel(logging.ERROR)
Setup gitlint logging
98
5
243,522
def build_config(ctx, target, config_path, c, extra_path, ignore, verbose, silent, debug):
    """Creates a LintConfig object based on a set of commandline parameters.

    Returns a (config, config_builder) tuple; exits the click context with
    CONFIG_ERROR_CODE on invalid configuration.
    """
    config_builder = LintConfigBuilder()
    try:
        # Config precedence:
        # First, load default config or config from configfile
        if config_path:
            config_builder.set_from_config_file(config_path)
        elif os.path.exists(DEFAULT_CONFIG_FILE):
            config_builder.set_from_config_file(DEFAULT_CONFIG_FILE)

        # Then process any commandline configuration flags
        config_builder.set_config_from_string_list(c)

        # Finally, overwrite with any convenience commandline flags
        if ignore:
            config_builder.set_option('general', 'ignore', ignore)
        if silent:
            config_builder.set_option('general', 'verbosity', 0)
        elif verbose > 0:
            config_builder.set_option('general', 'verbosity', verbose)
        if extra_path:
            config_builder.set_option('general', 'extra-path', extra_path)
        if target:
            config_builder.set_option('general', 'target', target)
        if debug:
            config_builder.set_option('general', 'debug', debug)

        config = config_builder.build()
        return config, config_builder
    except LintConfigError as e:
        click.echo(u"Config Error: {0}".format(ustr(e)))
        ctx.exit(CONFIG_ERROR_CODE)
Creates a LintConfig object based on a set of commandline parameters .
345
16
243,523
def get_stdin_data():
    """Helper function that returns data piped to stdin, or False if nothing
    was sent.

    STDIN is one of: a TTY, a (named) pipe (stat.S_ISFIFO) or a regular file
    (stat.S_ISREG). Only pipe/file data is read. Empty piped data — common in
    CI runners such as Jenkins and Gitlab that attach a pipe without writing
    to it — is treated the same as no data, so callers fall back to reading
    from the local repository.
    """
    mode = os.fstat(sys.stdin.fileno()).st_mode
    if stat.S_ISFIFO(mode) or stat.S_ISREG(mode):
        input_data = sys.stdin.read()
        # Don't consider empty piped data as actual input
        if input_data:
            return ustr(input_data)
    return False
Helper function that returns data sent to stdin, or False if nothing is sent
446
15
243,524
def cli(  # pylint: disable=too-many-arguments
        ctx, target, config, c, commits, extra_path, ignore, msg_filename, verbose, silent, debug,
):
    """Git lint tool, checks your git commit messages for styling issues"""
    try:
        if debug:
            logging.getLogger("gitlint").setLevel(logging.DEBUG)
            log_system_info()

        # Get the lint config from the commandline parameters and
        # store it in the context (click allows storing an arbitrary object in ctx.obj).
        config, config_builder = build_config(ctx, target, config, c, extra_path, ignore, verbose, silent, debug)
        LOG.debug(u"Configuration\n%s", ustr(config))

        ctx.obj = (config, config_builder, commits, msg_filename)

        # If no subcommand is specified, then just lint
        if ctx.invoked_subcommand is None:
            ctx.invoke(lint)

    except GitContextError as e:
        click.echo(ustr(e))
        ctx.exit(GIT_CONTEXT_ERROR_CODE)
Git lint tool checks your git commit messages for styling issues
243
13
243,525
def install_hook(ctx):
    """Install gitlint as a git commit-msg hook."""
    try:
        lint_config = ctx.obj[0]
        hooks.GitHookInstaller.install_commit_msg_hook(lint_config)
        # declare victory :-)
        hook_path = hooks.GitHookInstaller.commit_msg_hook_path(lint_config)
        click.echo(u"Successfully installed gitlint commit-msg hook in {0}".format(hook_path))
        ctx.exit(0)
    except hooks.GitHookInstallerError as e:
        click.echo(ustr(e), err=True)
        ctx.exit(GIT_CONTEXT_ERROR_CODE)
Install gitlint as a git commit - msg hook .
150
12
243,526
def uninstall_hook(ctx):
    """Uninstall the gitlint commit-msg hook."""
    try:
        lint_config = ctx.obj[0]
        hooks.GitHookInstaller.uninstall_commit_msg_hook(lint_config)
        # declare victory :-)
        hook_path = hooks.GitHookInstaller.commit_msg_hook_path(lint_config)
        click.echo(u"Successfully uninstalled gitlint commit-msg hook from {0}".format(hook_path))
        ctx.exit(0)
    except hooks.GitHookInstallerError as e:
        click.echo(ustr(e), err=True)
        ctx.exit(GIT_CONTEXT_ERROR_CODE)
Uninstall gitlint commit - msg hook .
151
10
243,527
def generate_config(ctx):
    """Generate a sample gitlint config file at a user-chosen location."""
    path = click.prompt('Please specify a location for the sample gitlint config file', default=DEFAULT_CONFIG_FILE)
    path = os.path.abspath(path)
    dir_name = os.path.dirname(path)
    # Refuse a non-existing parent directory or an already-existing file
    if not os.path.exists(dir_name):
        click.echo(u"Error: Directory '{0}' does not exist.".format(dir_name), err=True)
        ctx.exit(USAGE_ERROR_CODE)
    elif os.path.exists(path):
        click.echo(u"Error: File \"{0}\" already exists.".format(path), err=True)
        ctx.exit(USAGE_ERROR_CODE)
    LintConfigGenerator.generate_config(path)
    click.echo(u"Successfully generated {0}".format(path))
    ctx.exit(0)
Generates a sample gitlint config file .
208
10
243,528
def _assert_git_repo(target):
    """Assert that a given target directory is a git repository (i.e. has a
    hooks directory); raise GitHookInstallerError otherwise."""
    hooks_dir = os.path.abspath(os.path.join(target, HOOKS_DIR_PATH))
    if not os.path.isdir(hooks_dir):
        raise GitHookInstallerError(u"{0} is not a git repository.".format(target))
Asserts that a given target directory is a git repository
79
12
243,529
def get_job_url(config, hub, group, project):
    """Util method to get the jobs URL.

    Missing hub/group/project arguments fall back to values in config; when
    all three are known a network-scoped URL is returned, otherwise '/Jobs'.
    """
    if config is not None:
        if hub is None and 'hub' in config:
            hub = config["hub"]
        if group is None and 'group' in config:
            group = config["group"]
        if project is None and 'project' in config:
            project = config["project"]
    if hub is not None and group is not None and project is not None:
        return '/Network/{}/Groups/{}/Projects/{}/jobs'.format(hub, group, project)
    return '/Jobs'
Util method to get job url
171
7
243,530
def get_backend_stats_url(config, hub, backend_type):
    """Util method to get the backend stats URL.

    A missing hub argument falls back to config; with a hub the
    network-scoped devices URL is returned, otherwise '/Backends/...'.
    """
    if config is not None and 'hub' in config and hub is None:
        hub = config["hub"]
    if hub is None:
        return '/Backends/{}'.format(backend_type)
    return '/Network/{}/devices/{}'.format(hub, backend_type)
Util method to get backend stats url
95
8
243,531
def get_backend_url(config, hub, group, project):
    """Util method to get the backends URL.

    Missing hub/group/project arguments fall back to values in config; when
    all three are known a network-scoped URL is returned, else '/Backends'.
    """
    if config is not None:
        if hub is None and 'hub' in config:
            hub = config["hub"]
        if group is None and 'group' in config:
            group = config["group"]
        if project is None and 'project' in config:
            project = config["project"]
    if hub is not None and group is not None and project is not None:
        return '/Network/{}/Groups/{}/Projects/{}/devices'.format(hub, group, project)
    return '/Backends'
Util method to get backend url
172
7
243,532
def obtain_token(self, config=None):
    """Obtain the token to access the QX Platform.

    Logs in with the stored API token when available, otherwise with the
    email/password found in `config`.
    Raises CredentialsError for missing/invalid credentials and ApiError for
    transport or login failures.
    """
    client_application = CLIENT_APPLICATION
    if self.config and ("client_application" in self.config):
        client_application += ':' + self.config["client_application"]
    headers = {'x-qx-client-application': client_application}

    if self.token_unique:
        # Preferred path: login with the unique API token
        try:
            response = requests.post(str(self.config.get('url') + "/users/loginWithToken"),
                                     data={'apiToken': self.token_unique},
                                     verify=self.verify,
                                     headers=headers,
                                     **self.extra_args)
        except requests.RequestException as e:
            raise ApiError('error during login: %s' % str(e))
    elif config and ("email" in config) and ("password" in config):
        # Fallback path: login with email/password credentials
        email = config.get('email', None)
        password = config.get('password', None)
        credentials = {'email': email, 'password': password}
        try:
            response = requests.post(str(self.config.get('url') + "/users/login"),
                                     data=credentials,
                                     verify=self.verify,
                                     headers=headers,
                                     **self.extra_args)
        except requests.RequestException as e:
            raise ApiError('error during login: %s' % str(e))
    else:
        raise CredentialsError('invalid token')

    if response.status_code == 401:
        error_message = None
        try:
            # For 401: ACCEPT_LICENSE_REQUIRED, a detailed message is
            # present in the response and passed to the exception.
            error_message = response.json()['error']['message']
        except Exception:
            # Fixed: was a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt. Missing detail is non-fatal.
            pass
        if error_message:
            raise CredentialsError('error during login: %s' % error_message)
        raise CredentialsError('invalid token')

    try:
        response.raise_for_status()
        self.data_credentials = response.json()
    except (requests.HTTPError, ValueError) as e:
        raise ApiError('error during login: %s' % str(e))

    if self.get_token() is None:
        raise CredentialsError('invalid token')
Obtain the token to access to QX Platform .
506
11
243,533
def check_token(self, respond):
    """Check whether the user's token is still valid.

    On a 401 response the token is refreshed and False is returned (the
    caller should retry); otherwise returns True.
    """
    if respond.status_code != 401:
        return True
    self.credential.obtain_token(config=self.config)
    return False
Check is the user s token is valid
37
8
243,534
def post(self, path, params='', data=None):
    """POST Method Wrapper of the REST API.

    Retries up to self.retries times, refreshing the token once per attempt
    on a 401. Raises ApiError when no proper response was obtained.
    """
    self.result = None
    data = data or {}
    headers = {'Content-Type': 'application/json',
               'x-qx-client-application': self.client_application}
    url = str(self.credential.config['url'] + path + '?access_token=' +
              self.credential.get_token() + params)
    retries = self.retries
    while retries > 0:
        respond = requests.post(url, data=data, headers=headers,
                                verify=self.verify, **self.extra_args)
        if not self.check_token(respond):
            # Token was refreshed -> replay the request once.
            # NOTE(review): url still embeds the old access_token here — confirm
            # the server accepts the refreshed session despite the stale query param.
            respond = requests.post(url, data=data, headers=headers,
                                    verify=self.verify, **self.extra_args)
        if self._response_good(respond):
            # _response_good may have populated self.result as a side effect
            if self.result:
                return self.result
            elif retries < 2:
                return respond.json()
            else:
                retries -= 1
        else:
            retries -= 1
        time.sleep(self.timeout_interval)
    # timed out
    raise ApiError(usr_msg='Failed to get proper ' + 'response from backend.')
POST Method Wrapper of the REST API
262
8
243,535
def _parse_response ( self , respond ) : # convert error messages into exceptions mobj = self . _max_qubit_error_re . match ( respond . text ) if mobj : raise RegisterSizeError ( 'device register size must be <= {}' . format ( mobj . group ( 1 ) ) ) return True
parse text of response for HTTP errors
70
7
243,536
def _check_backend ( self , backend , endpoint ) : # First check against hacks for old backend names original_backend = backend backend = backend . lower ( ) if endpoint == 'experiment' : if backend in self . __names_backend_ibmqxv2 : return 'real' elif backend in self . __names_backend_ibmqxv3 : return 'ibmqx3' elif backend in self . __names_backend_simulator : return 'sim_trivial_2' # Check for new-style backends backends = self . available_backends ( ) for backend in backends : if backend [ 'name' ] == original_backend : return original_backend # backend unrecognized return None
Check if the name of a backend is valid to run in QX Platform
167
15
243,537
def get_execution(self, id_execution, access_token=None, user_id=None):
    """Get an execution by its id, attaching its code when a codeId is present."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    execution = self.req.get('/Executions/' + id_execution)
    if "codeId" in execution:
        execution['code'] = self.get_code(execution["codeId"])
    return execution
Get an execution by its id
141
6
243,538
def get_result_from_execution(self, id_execution, access_token=None, user_id=None):
    """Get the result of an execution, by the execution id.

    Extracts measurement, bloch, extra info, calibration, creg labels and
    timing data (when present) into a flat result dict.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    execution = self.req.get('/Executions/' + id_execution)
    result = {}
    if "result" in execution and "data" in execution["result"]:
        if execution["result"]["data"].get('p', None):
            result["measure"] = execution["result"]["data"]["p"]
        if execution["result"]["data"].get('valsxyz', None):
            result["bloch"] = execution["result"]["data"]["valsxyz"]
        if "additionalData" in execution["result"]["data"]:
            ad_aux = execution["result"]["data"]["additionalData"]
            result["extraInfo"] = ad_aux
        if "calibration" in execution:
            result["calibration"] = execution["calibration"]
        if execution["result"]["data"].get('cregLabels', None):
            result["creg_labels"] = execution["result"]["data"]["cregLabels"]
        if execution["result"]["data"].get('time', None):
            result["time_taken"] = execution["result"]["data"]["time"]
    return result
Get the result of an execution by the execution id
415
10
243,539
def get_code(self, id_code, access_token=None, user_id=None):
    """Get a code by its id, including up to its 3 latest executions."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    code = self.req.get('/Codes/' + id_code)
    executions = self.req.get('/Codes/' + id_code + '/executions', '&filter={"limit":3}')
    if isinstance(executions, list):
        code["executions"] = executions
    return code
Get a code by its id
161
6
243,540
def get_image_code(self, id_code, access_token=None, user_id=None):
    """Get the (exported PNG) image of a code, by its id."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    return self.req.get('/Codes/' + id_code + '/export/png/url')
Get the image of a code by its id
116
9
243,541
def get_last_codes(self, access_token=None, user_id=None):
    """Get the latest codes of the user, including their executions."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    # NB: 'lastest' is the actual (misspelled) server endpoint path.
    last = '/users/' + self.req.credential.get_user_id() + '/codes/lastest'
    return self.req.get(last, '&includeExecutions=true')['codes']
Get the last codes of the user
137
7
243,542
def run_job(self, job, backend='simulator', shots=1, max_credits=None,
            seed=None, hub=None, group=None, project=None, hpc=None,
            access_token=None, user_id=None):
    """Execute a job.

    `job` is either a list/tuple of qasm dicts (legacy payload) or a dict
    (qObject payload). Returns the server's job response, or an error dict
    for invalid credentials/seed/payload.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        return {"error": "Not credentials valid"}

    backend_type = self._check_backend(backend, 'job')
    if not backend_type:
        raise BadBackendError(backend)

    if isinstance(job, (list, tuple)):
        qasms = job
        # Strip version headers; they are implied by the API. NOTE: mutates
        # the caller's qasm dicts in place.
        for qasm in qasms:
            qasm['qasm'] = qasm['qasm'].replace('IBMQASM 2.0;', '')
            qasm['qasm'] = qasm['qasm'].replace('OPENQASM 2.0;', '')
        data = {'qasms': qasms, 'shots': shots, 'backend': {}}
        if max_credits:
            data['maxCredits'] = max_credits
        # Seed must be a non-zero integer of at most 10 digits
        if seed and len(str(seed)) < 11 and str(seed).isdigit():
            data['seed'] = seed
        elif seed:
            return {"error": "Not seed allowed. Max 10 digits."}
        data['backend']['name'] = backend_type
    elif isinstance(job, dict):
        q_obj = job
        data = {'qObject': q_obj, 'backend': {}}
        data['backend']['name'] = backend_type
    else:
        return {"error": "Not a valid data to send"}

    if hpc:
        data['hpc'] = hpc

    url = get_job_url(self.config, hub, group, project)
    job = self.req.post(url, data=json.dumps(data))
    return job
Execute a job
479
4
243,543
def get_job(self, id_job, hub=None, group=None, project=None, access_token=None, user_id=None):
    """Get the information about a job, by its id.

    Returns an error dict when credentials are invalid or no job id is given.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        respond = {}
        respond["status"] = 'Error'
        respond["error"] = "Not credentials valid"
        return respond
    if not id_job:
        respond = {}
        respond["status"] = 'Error'
        respond["error"] = "Job ID not specified"
        return respond

    url = get_job_url(self.config, hub, group, project)
    url += '/' + id_job
    job = self.req.get(url)

    if 'qasms' in job:
        # Flatten each qasm's nested 'result' into a single 'data' dict,
        # removing the 'result' key afterwards.
        for qasm in job['qasms']:
            if ('result' in qasm) and ('data' in qasm['result']):
                qasm['data'] = qasm['result']['data']
                del qasm['result']['data']
                for key in qasm['result']:
                    qasm['data'][key] = qasm['result'][key]
                del qasm['result']
    return job
Get the information about a job by its id
306
9
243,544
def get_jobs(self, limit=10, skip=0, backend=None, only_completed=False,
             filter=None, hub=None, group=None, project=None,
             access_token=None, user_id=None):
    """Get information about the user's jobs, newest first.

    A caller-supplied `filter` replaces the generated where-clause entirely.
    (Parameter name `filter` shadows the builtin but is kept for API
    compatibility.)
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        return {"error": "Not credentials valid"}
    url = get_job_url(self.config, hub, group, project)
    query = {"order": "creationDate DESC", "limit": limit, "skip": skip, "where": {}}
    if filter is not None:
        query['where'] = filter
    else:
        if backend is not None:
            query['where']['backend.name'] = backend
        if only_completed:
            query['where']['status'] = 'COMPLETED'
    url_filter = '&filter=' + json.dumps(query)
    return self.req.get(url, url_filter)
Get the information about the user jobs
268
7
243,545
def get_status_job(self, id_job, hub=None, group=None, project=None,
                   access_token=None, user_id=None):
    """Get the status of a job, by its id.

    Returns an error dict when credentials are invalid or no job id is given.
    """
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        return {"status": 'Error', "error": "Not credentials valid"}
    if not id_job:
        return {"status": 'Error', "error": "Job ID not specified"}
    url = get_job_url(self.config, hub, group, project) + '/' + id_job + '/status'
    return self.req.get(url)
Get the status about a job by its id
195
9
243,546
def cancel_job(self, id_job, hub=None, group=None, project=None,
               access_token=None, user_id=None):
    """Cancel a job by its id."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        return {"status": 'Error', "error": "Not credentials valid"}
    if not id_job:
        return {"status": 'Error', "error": "Job ID not specified"}
    url = get_job_url(self.config, hub, group, project)
    url += '/{}/cancel'.format(id_job)
    return self.req.post(url)
Cancel a job by its id
197
10
243,547
def backend_status(self, backend='ibmqx4', access_token=None, user_id=None):
    """Get the status of a chip (availability, busy flag, queue length)."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    backend_type = self._check_backend(backend, 'status')
    if not backend_type:
        raise BadBackendError(backend)
    status = self.req.get('/Backends/' + backend_type + '/queue/status',
                          with_token=False)
    ret = {}
    if 'state' in status:
        ret['available'] = bool(status['state'])
    if 'busy' in status:
        ret['busy'] = bool(status['busy'])
    if 'lengthQueue' in status:
        ret['pending_jobs'] = status['lengthQueue']
    ret['backend'] = backend_type
    return ret
Get the status of a chip
223
6
243,548
def backend_calibration(self, backend='ibmqx4', hub=None, access_token=None,
                        user_id=None):
    """Get the calibration of a real chip (empty dict for simulators)."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    backend_type = self._check_backend(backend, 'calibration')
    if not backend_type:
        raise BadBackendError(backend)
    # Simulators carry no calibration data.
    if backend_type in self.__names_backend_simulator:
        return {}
    url = get_backend_stats_url(self.config, hub, backend_type)
    ret = self.req.get(url + '/calibration')
    if not bool(ret):
        return {}
    ret["backend"] = backend_type
    return ret
Get the calibration of a real chip
220
7
243,549
def available_backends(self, hub=None, group=None, project=None,
                       access_token=None, user_id=None):
    """Get the backends available to use in the QX Platform."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    url = get_backend_url(self.config, hub, group, project)
    ret = self.req.get(url)
    # NOTE(review): a dict here presumably means an error payload rather
    # than the expected list of backends — confirm against the API.
    if (ret is not None) and (isinstance(ret, dict)):
        return []
    return [backend for backend in ret if backend.get('status') == 'on']
Get the backends available to use in the QX Platform
171
12
243,550
def available_backend_simulators(self, access_token=None, user_id=None):
    """Get the backend simulators available to use in the QX Platform."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    ret = self.req.get('/Backends')
    # NOTE(review): a dict here presumably means an error payload rather
    # than the expected list of backends — confirm against the API.
    if (ret is not None) and (isinstance(ret, dict)):
        return []
    return [backend for backend in ret
            if backend.get('status') == 'on' and
            backend.get('simulator') is True]
Get the backend simulators available to use in the QX Platform
158
13
243,551
def get_my_credits(self, access_token=None, user_id=None):
    """Get the user's credits to use in the QX Platform."""
    if access_token:
        self.req.credential.set_token(access_token)
    if user_id:
        self.req.credential.set_user_id(user_id)
    if not self.check_credentials():
        raise CredentialsError('credentials invalid')
    user_data_url = '/users/' + self.req.credential.get_user_id()
    user_data = self.req.get(user_data_url)
    if "credit" not in user_data:
        return {}
    credit = user_data["credit"]
    # Strip bookkeeping fields before handing the credit info back.
    credit.pop("promotionalCodesUsed", None)
    credit.pop("lastRefill", None)
    return credit
Get the credits by user to use in the QX Platform
219
12
243,552
def trace(self, predicate):
    """Start tracing with the given callable, installing self as trace hook."""
    self._handler = predicate
    if self.threading_support is None or self.threading_support:
        # Remember any previously installed threading trace hook.
        self._threading_previous = getattr(threading, '_trace_hook', None)
        threading.settrace(self)
    self._previous = sys.gettrace()
    sys.settrace(self)
    return self
Starts tracing with the given callable .
84
9
243,553
def And(*predicates, **kwargs):
    """`And` predicate: returns False at the first sub-predicate returning False."""
    if kwargs:
        predicates += (Query(**kwargs),)
    return _flatten(_And, *predicates)
And predicate . Returns False at the first sub - predicate that returns False .
43
15
243,554
def Or(*predicates, **kwargs):
    """`Or` predicate: returns True at the first sub-predicate returning True."""
    if kwargs:
        # Each keyword becomes its own single-key Query sub-predicate.
        predicates += tuple(Query(**{key: value})
                            for key, value in kwargs.items())
    return _flatten(_Or, *predicates)
Or predicate . Returns True at the first sub - predicate that returns True .
59
15
243,555
def wrap(function_to_trace=None, **trace_options):
    """Decorator: functions decorated with this will be traced.

    Usable bare (``@wrap``) or with options (``@wrap(local=True, ...)``).

    Fix: ``'local'`` is popped from ``trace_options`` once, at decoration
    time.  The original popped it inside the wrapper, so the shared options
    dict was mutated on the first call and every later call of the wrapped
    function silently lost ``local=True``.
    """
    # Pop once: trace_options is shared across every call of the wrapper.
    local = trace_options.pop('local', False)

    def tracing_decorator(func):
        @functools.wraps(func)
        def tracing_wrapper(*args, **kwargs):
            predicates = []
            if local:
                predicates.append(Q(depth_lt=2))
            # Stop tracing once the decorated call itself returns.
            predicates.append(
                ~When(Q(calls_gt=0, depth=0) & ~Q(kind='return'), Stop))
            local_tracer = trace(*predicates, **trace_options)
            try:
                return func(*args, **kwargs)
            finally:
                local_tracer.stop()
        return tracing_wrapper

    if function_to_trace is None:
        return tracing_decorator
    return tracing_decorator(function_to_trace)
Functions decorated with this will be traced .
197
9
243,556
def threadid(self):
    """Current thread ident, or None when running on the main thread."""
    current = self.thread.ident
    main = get_main_thread()
    if main is None:
        return current
    return current if current != main.ident else None
Current thread ident . If current thread is main thread then it returns None .
42
15
243,557
def filename(self, exists=os.path.exists, cython_suffix_re=CYTHON_SUFFIX_RE):
    """A string with the absolute path to the frame's source file."""
    filename = self.frame.f_globals.get('__file__', '')
    if filename is None:
        filename = ''
    if filename.endswith(('.pyc', '.pyo')):
        # Bytecode file: point at the .py next to it.
        filename = filename[:-1]
    elif filename.endswith('$py.class'):  # Jython
        filename = filename[:-9] + ".py"
    elif filename.endswith(('.so', '.pyd')):
        # Compiled extension: try to locate a Cython/Python source beside it.
        basename = cython_suffix_re.sub('', filename)
        for ext in ('.pyx', '.py'):
            cyfilename = basename + ext
            if exists(cyfilename):
                filename = cyfilename
                break
    return filename
A string with absolute path to file .
195
8
243,558
def stdlib(self):
    """A boolean flag: True if the frame's file belongs to the stdlib."""
    if self.module == 'pkg_resources' or self.module.startswith('pkg_resources.'):
        return False
    if self.filename.startswith(SITE_PACKAGES_PATHS):
        # If it's in site-packages then it's definitely not stdlib.
        return False
    if self.filename.startswith(SYS_PREFIX_PATHS):
        return True
    return False
A boolean flag . True if frame is in stdlib .
101
12
243,559
def _iter_symbols(code):
    """Yield every variable name used in the given expression."""
    tree = ast.parse(code)
    for node in ast.walk(tree):
        if isinstance(node, ast.Name):
            yield node.id
Iterate all the variable names in the given expression .
41
11
243,560
def __make_request_url(self, teststep_dict, entry_json):
    """Parse HAR entry request url and queryString into teststep url/params."""
    request_params = utils.convert_list_to_dict(
        entry_json["request"].get("queryString", []))
    url = entry_json["request"].get("url")
    if not url:
        logging.exception("url missed in request.")
        sys.exit(1)
    parsed_object = urlparse.urlparse(url)
    if request_params:
        # Strip the query string from the url; params go in their own field.
        parsed_object = parsed_object._replace(query='')
        teststep_dict["request"]["url"] = parsed_object.geturl()
        teststep_dict["request"]["params"] = request_params
    else:
        teststep_dict["request"]["url"] = url
    teststep_dict["name"] = parsed_object.path
parse HAR entry request url and queryString and make teststep url and params
199
15
243,561
def __make_request_method(self, teststep_dict, entry_json):
    """Parse HAR entry request method into the teststep; exit when missing."""
    method = entry_json["request"].get("method")
    if not method:
        logging.exception("method missed in request.")
        sys.exit(1)
    teststep_dict["request"]["method"] = method
parse HAR entry request method and make teststep method .
74
11
243,562
def __make_request_headers(self, teststep_dict, entry_json):
    """Copy HAR request headers into the teststep.

    Headers listed in IGNORE_REQUEST_HEADERS are skipped.
    """
    collected = {}
    for header in entry_json["request"].get("headers", []):
        if header["name"].lower() in IGNORE_REQUEST_HEADERS:
            continue
        collected[header["name"]] = header["value"]
    if collected:
        teststep_dict["request"]["headers"] = collected
parse HAR entry request headers and make teststep headers . header in IGNORE_REQUEST_HEADERS will be ignored .
115
25
243,563
def _make_request_data(self, teststep_dict, entry_json):
    """Parse HAR entry request data and fill the teststep request body.

    Only POST/PUT/PATCH requests carry a body; the field name is ``json``
    for JSON payloads and ``data`` otherwise.
    """
    method = entry_json["request"].get("method")
    if method not in ["POST", "PUT", "PATCH"]:
        return
    postData = entry_json["request"].get("postData", {})
    mimeType = postData.get("mimeType")
    # Note that text and params fields are mutually exclusive.
    if "text" in postData:
        post_data = postData.get("text")
    else:
        post_data = utils.convert_list_to_dict(postData.get("params", []))
    request_data_key = "data"
    if not mimeType:
        pass
    elif mimeType.startswith("application/json"):
        try:
            post_data = json.loads(post_data)
            request_data_key = "json"
        except JSONDecodeError:
            # Keep the raw text under "data" when it's not valid JSON.
            pass
    elif mimeType.startswith("application/x-www-form-urlencoded"):
        post_data = utils.convert_x_www_form_urlencoded_to_dict(post_data)
    else:
        # TODO: make compatible with more mimeType
        pass
    teststep_dict["request"][request_data_key] = post_data
parse HAR entry request data and make teststep request data
306
11
243,564
def _make_validate(self, teststep_dict, entry_json):
    """Parse HAR entry response and append teststep validators.

    Always checks status code; adds a Content-Type check when the header is
    present, and per-key checks on a flat JSON response body.
    """
    teststep_dict["validate"].append(
        {"eq": ["status_code", entry_json["response"].get("status")]})
    resp_content_dict = entry_json["response"].get("content")
    headers_mapping = utils.convert_list_to_dict(
        entry_json["response"].get("headers", []))
    if "Content-Type" in headers_mapping:
        teststep_dict["validate"].append(
            {"eq": ["headers.Content-Type", headers_mapping["Content-Type"]]})
    text = resp_content_dict.get("text")
    if not text:
        return
    mime_type = resp_content_dict.get("mimeType")
    if not (mime_type and mime_type.startswith("application/json")):
        return
    encoding = resp_content_dict.get("encoding")
    if encoding and encoding == "base64":
        content = base64.b64decode(text).decode('utf-8')
    else:
        content = text
    try:
        resp_content_json = json.loads(content)
    except JSONDecodeError:
        logging.warning(
            "response content can not be loaded as json: {}".format(
                content.encode("utf-8")))
        return
    if not isinstance(resp_content_json, dict):
        return
    for key, value in resp_content_json.items():
        # Only validate scalar fields; skip nested structures.
        if isinstance(value, (dict, list)):
            continue
        teststep_dict["validate"].append(
            {"eq": ["content.{}".format(key), value]})
parse HAR entry response and make teststep validate .
412
10
243,565
def load_har_log_entries(file_path):
    """Load a HAR file and return its log entries list.

    Exits the process when the file's content is not a valid HAR structure.
    """
    with io.open(file_path, "r+", encoding="utf-8-sig") as f:
        try:
            content_json = json.loads(f.read())
            return content_json["log"]["entries"]
        except (KeyError, TypeError):
            logging.error("HAR file content error: {}".format(file_path))
            sys.exit(1)
load HAR file and return log entries list
110
8
243,566
def x_www_form_urlencoded(post_data):
    """Convert a dict to an x-www-form-urlencoded string.

    Non-dict input is returned unchanged.
    NOTE(review): values are not percent-encoded here — presumably callers
    pass pre-encoded data; confirm before reuse.
    """
    if not isinstance(post_data, dict):
        return post_data
    return "&".join([u"{}={}".format(key, value)
                     for key, value in post_data.items()])
convert origin dict to x - www - form - urlencoded
68
14
243,567
def convert_x_www_form_urlencoded_to_dict(post_data):
    """Convert x-www-form-urlencoded data to a dict.

    Non-string input is returned unchanged.  Raises Exception when a pair
    has no '=' separator at all.

    Fix: split each pair on the FIRST '=' only, so values that themselves
    contain '=' (e.g. base64 padding) no longer raise a spurious
    "Invalid ... data format" error.
    """
    if not isinstance(post_data, str):
        return post_data
    converted_dict = {}
    for k_v in post_data.split("&"):
        try:
            key, value = k_v.split("=", 1)
        except ValueError:
            raise Exception(
                "Invalid x_www_form_urlencoded data format: {}".format(post_data))
        converted_dict[key] = unquote(value)
    return converted_dict
convert x_www_form_urlencoded data to dict
123
14
243,568
def dump_yaml(testcase, yaml_file):
    """Dump HAR entries (testcase dict) to a YAML testcase file."""
    logging.info("dump testcase to YAML format.")
    with io.open(yaml_file, 'w', encoding="utf-8") as outfile:
        yaml.dump(testcase, outfile, allow_unicode=True,
                  default_flow_style=False, indent=4)
    logging.info("Generate YAML testcase successfully: {}".format(yaml_file))
dump HAR entries to yaml testcase
111
8
243,569
def dump_json(testcase, json_file):
    """Dump HAR entries (testcase dict) to a JSON testcase file."""
    logging.info("dump testcase to JSON format.")
    with io.open(json_file, 'w', encoding="utf-8") as outfile:
        my_json_str = json.dumps(testcase, ensure_ascii=ensure_ascii, indent=4)
        # On Python 2, json.dumps may hand back bytes; normalize to text.
        if isinstance(my_json_str, bytes):
            my_json_str = my_json_str.decode("utf-8")
        outfile.write(my_json_str)
    logging.info("Generate JSON testcase successfully: {}".format(json_file))
dump HAR entries to json testcase
146
7
243,570
def prepare_request(self, request):
    """Include the request ID, if available, in the outgoing request headers."""
    try:
        request_id = local.request_id
    except AttributeError:
        request_id = NO_REQUEST_ID
    if self.request_id_header and request_id != NO_REQUEST_ID:
        request.headers[self.request_id_header] = request_id
    return super(Session, self).prepare_request(request)
Include the request ID if available in the outgoing request
86
11
243,571
def _get(self, url, params=None, headers=None):
    """Wrap a GET request with a url check."""
    url = self.clean_url(url)
    return requests.get(url, params=params, verify=self.verify,
                        timeout=self.timeout, headers=headers)
Wraps a GET request with a url check
57
9
243,572
def _post(self, url, data=None, json=None, params=None, headers=None):
    """Wrap a POST request with a url check."""
    url = self.clean_url(url)
    return requests.post(url, data=data, json=json, params=params,
                         headers=headers, timeout=self.timeout,
                         verify=self.verify)
Wraps a POST request with a url check
73
9
243,573
def expand(self, url):
    """Base expand method: visit the link and return the final response url."""
    url = self.clean_url(url)
    response = self._get(url)
    if not response.ok:
        raise ExpandingErrorException
    return response.url
Base expand method . Only visits the link and return the response url
41
13
243,574
def clean_url(url):
    """URL validation: prepend a scheme when missing and validate the result.

    Raises BadURLException for urls that don't match URL_RE.
    """
    if not url.startswith(('http://', 'https://')):
        url = f'http://{url}'
    if not URL_RE.match(url):
        raise BadURLException(f'{url} is not valid')
    return url
URL Validation function
69
4
243,575
def create_function_from_request_pdu(pdu):
    """Return a function instance based on the request PDU.

    Raises IllegalFunctionError for unknown function codes.
    """
    function_code = get_function_code_from_request_pdu(pdu)
    try:
        function_class = function_code_to_function_map[function_code]
    except KeyError:
        raise IllegalFunctionError(function_code)
    return function_class.create_from_request_pdu(pdu)
Return function instance based on request PDU .
86
9
243,576
def request_pdu(self):
    """Build request PDU to read coils."""
    if None in [self.starting_address, self.quantity]:
        # TODO Raise proper exception.
        raise Exception
    return struct.pack('>BHH', self.function_code,
                       self.starting_address, self.quantity)
Build request PDU to read coils .
60
8
243,577
def request_pdu(self):
    """Build request PDU to write a single coil.

    Packs the raw wire value (``_value``), not the user-facing ``value``.
    """
    if None in [self.address, self.value]:
        # TODO Raise proper exception.
        raise Exception
    return struct.pack('>BHH', self.function_code, self.address, self._value)
Build request PDU to write single coil .
57
9
243,578
def value(self, value):
    """Validate and store the value to be written on a register.

    Raises IllegalDataValueError when the value doesn't fit the configured
    register type.
    """
    try:
        struct.pack('>' + conf.TYPE_CHAR, value)
    except struct.error:
        raise IllegalDataValueError
    self._value = value
Value to be written on register .
42
7
243,579
def request_pdu(self):
    """Build request PDU to write a single register."""
    if None in [self.address, self.value]:
        # TODO Raise proper exception.
        raise Exception
    return struct.pack('>BH' + conf.TYPE_CHAR, self.function_code,
                       self.address, self.value)
Build request PDU to write single register .
62
9
243,580
def serve_forever(self, poll_interval=0.5):
    """Wait for incoming requests until a shutdown is requested."""
    self.serial_port.timeout = poll_interval
    while not self._shutdown_request:
        try:
            self.serve_once()
        except (CRCError, struct.error) as e:
            log.error('Can\'t handle request: {0}'.format(e))
        except (SerialTimeoutException, ValueError):
            # Timeouts and empty reads are expected while polling.
            pass
Wait for incoming requests.
95
7
243,581
def execute_route(self, meta_data, request_pdu):
    """Execute the configured route for the request's meta data and PDU.

    Always returns a response PDU; errors are translated into Modbus
    exception PDUs instead of propagating.
    """
    try:
        function = create_function_from_request_pdu(request_pdu)
        results = function.execute(meta_data['unit_id'], self.route_map)
        try:
            # ReadFunction's use results of callbacks to build response
            # PDU...
            return function.create_response_pdu(results)
        except TypeError:
            # ...other functions don't.
            return function.create_response_pdu()
    except ModbusError as e:
        function_code = get_function_code_from_request_pdu(request_pdu)
        return pack_exception_pdu(function_code, e.error_code)
    except Exception as e:
        log.exception('Could not handle request: {0}.'.format(e))
        function_code = get_function_code_from_request_pdu(request_pdu)
        return pack_exception_pdu(function_code,
                                  ServerDeviceFailureError.error_code)
Execute configured route based on requests meta data and request PDU .
229
14
243,582
def serial_port(self, serial_port):
    """Set frame-detection timeouts on the port, derived from its baudrate."""
    char_size = get_char_size(serial_port.baudrate)
    # See docstring of get_char_size() for meaning of constants below.
    serial_port.inter_byte_timeout = 1.5 * char_size
    serial_port.timeout = 3.5 * char_size
    self._serial_port = serial_port
Set timeouts on serial port based on baudrate to detect frames .
87
15
243,583
def serve_once(self):
    """Listen for and handle a single request."""
    # 256 is the maximum size of a Modbus RTU frame.
    request_adu = self.serial_port.read(256)
    log.debug('<-- {0}'.format(hexlify(request_adu)))
    if len(request_adu) == 0:
        raise ValueError
    response_adu = self.process(request_adu)
    self.respond(response_adu)
Listen and handle 1 request .
99
6
243,584
def generate_look_up_table():
    """Generate the 256-entry CRC-16/Modbus look-up table (poly 0xA001)."""
    polynomial = 0xA001
    table = []
    for index in range(256):
        shifted = index << 1
        remainder = 0
        for _ in range(8):
            shifted >>= 1
            if (shifted ^ remainder) & 0x0001:
                remainder = (remainder >> 1) ^ polynomial
            else:
                remainder >>= 1
        table.append(remainder)
    return table
Generate look up table .
94
6
243,585
def get_crc(msg):
    """Return the 2-byte CRC for the message."""
    register = 0xFFFF
    for byte_ in msg:
        # Iterating bytes yields ints on Python 3 but 1-byte strings on
        # Python 2, hence the fallback.
        try:
            val = struct.unpack('<B', byte_)[0]
        except TypeError:
            val = byte_
        register = (register >> 8) ^ look_up_table[(register ^ val) & 0xFF]
    # CRC is little-endian!
    return struct.pack('<H', register)
Return CRC of 2 byte for message .
114
8
243,586
def validate_crc(msg):
    """Raise CRCError when the message's trailing 2-byte CRC doesn't match."""
    expected = struct.unpack('<H', get_crc(msg[:-2]))
    actual = struct.unpack('<H', msg[-2:])
    if expected != actual:
        raise CRCError('CRC validation failed.')
Validate CRC of message .
66
6
243,587
def _create_request_adu(slave_id, req_pdu):
    """Return request ADU (slave id + PDU + CRC) for Modbus RTU."""
    first_part_adu = struct.pack('>B', slave_id) + req_pdu
    return first_part_adu + get_crc(first_part_adu)
Return request ADU for Modbus RTU .
64
10
243,588
def send_message(adu, serial_port):
    """Send ADU over serial to the server and return the parsed response."""
    serial_port.write(adu)
    serial_port.flush()
    # Check exception ADU (which is shorter than all other responses) first.
    exception_adu_size = 5
    response_error_adu = recv_exactly(serial_port.read, exception_adu_size)
    raise_for_exception_adu(response_error_adu)
    expected_response_size = \
        expected_response_pdu_size_from_request_pdu(adu[1:-2]) + 3
    response_remainder = recv_exactly(
        serial_port.read, expected_response_size - exception_adu_size)
    return parse_response_adu(response_error_adu + response_remainder, adu)
Send ADU over serial to the server and return the parsed response.
184
13
243,589
def pack_mbap(transaction_id, protocol_id, length, unit_id):
    """Create and return a response MBAP header (big-endian HHHB)."""
    return struct.pack('>HHHB', transaction_id, protocol_id, length, unit_id)
Create and return response MBAP .
46
7
243,590
def memoize(f):
    """Decorator caching the single-argument function's return value.

    Repeated calls with the same argument return the cached result.
    """
    cache = {}

    @wraps(f)
    def inner(arg):
        if arg not in cache:
            cache[arg] = f(arg)
        return cache[arg]

    return inner
Decorator which caches the function's return value each time it is called. If called later with the same argument, the cached value is returned.
44
26
243,591
def recv_exactly(recv_fn, size):
    """Read exactly ``size`` bytes via ``recv_fn``.

    Raises ValueError when the source is exhausted before ``size`` bytes
    arrive (closed connection or empty read).
    """
    chunks = []
    received = 0
    while received < size:
        chunk = recv_fn(size - received)
        if len(chunk) == 0:
            # Closed or empty — stop trying.
            break
        received += len(chunk)
        chunks.append(chunk)
    response = b''.join(chunks)
    if len(response) != size:
        raise ValueError
    return response
Use the function to read and return exactly number of bytes desired .
98
13
243,592
def _create_mbap_header(slave_id, pdu):
    """Return byte array with MBAP header for the PDU."""
    # 65535 = (2**16)-1, the maximum number that fits in 2 bytes.
    transaction_id = randint(0, 65535)
    length = len(pdu) + 1
    return struct.pack('>HHHB', transaction_id, 0, length, slave_id)
Return byte array with MBAP header for PDU .
80
11
243,593
def send_message(adu, sock):
    """Send ADU over socket to the server and return the parsed response."""
    sock.sendall(adu)
    # Check exception ADU (which is shorter than all other responses) first.
    exception_adu_size = 9
    response_error_adu = recv_exactly(sock.recv, exception_adu_size)
    raise_for_exception_adu(response_error_adu)
    expected_response_size = \
        expected_response_pdu_size_from_request_pdu(adu[7:]) + 7
    response_remainder = recv_exactly(
        sock.recv, expected_response_size - exception_adu_size)
    return parse_response_adu(response_error_adu + response_remainder, adu)
Send ADU over socket to the server and return the parsed response.
170
13
243,594
def get_serial_port():
    """Return a serial.Serial instance ready to use for RS485."""
    port = Serial(port='/dev/ttyS1', baudrate=9600, parity=PARITY_NONE,
                  stopbits=1, bytesize=8, timeout=1)
    fh = port.fileno()
    # A struct with configuration for the serial port: first field enables
    # RS485 mode, the rest are zeroed.
    serial_rs485 = struct.pack('hhhhhhhh', 1, 0, 0, 0, 0, 0, 0, 0)
    # NOTE(review): 0x542F is presumably the Linux TIOCSRS485 ioctl —
    # confirm for the target platform.
    fcntl.ioctl(fh, 0x542F, serial_rs485)
    return port
Return serial . Serial instance ready to use for RS485 .
124
12
243,595
def _set_multi_bit_value_format_character(self):
    """Set the struct format character for multibit values.

    Uppercase (unsigned) by default; lowercase (signed) when SIGNED_VALUES
    is set.
    """
    self.MULTI_BIT_VALUE_FORMAT_CHARACTER = \
        self.MULTI_BIT_VALUE_FORMAT_CHARACTER.upper()
    if self.SIGNED_VALUES:
        self.MULTI_BIT_VALUE_FORMAT_CHARACTER = \
            self.MULTI_BIT_VALUE_FORMAT_CHARACTER.lower()
Set format character for multibit values .
100
9
243,596
def get_filename4code(module, content, ext=None):
    """Generate a content-hashed filename inside '<module>-images'."""
    imagedir = module + "-images"
    fn = hashlib.sha1(content.encode(sys.getfilesystemencoding())).hexdigest()
    try:
        os.mkdir(imagedir)
        sys.stderr.write('Created directory ' + imagedir + '\n')
    except OSError:
        # Directory already exists — reuse it.
        pass
    if ext:
        fn += "." + ext
    return os.path.join(imagedir, fn)
Generate filename based on content
117
6
243,597
def toJSONFilters(actions):
    """Generate a JSON-to-JSON filter from stdin to stdout.

    The target format is taken from argv[1] when present.
    """
    try:
        input_stream = io.TextIOWrapper(sys.stdin.buffer, encoding='utf-8')
    except AttributeError:
        # Python 2 does not have sys.stdin.buffer.
        # REF: https://stackoverflow.com/questions/2467928/python-unicodeencode
        input_stream = codecs.getreader("utf-8")(sys.stdin)
    source = input_stream.read()
    format = sys.argv[1] if len(sys.argv) > 1 else ""
    sys.stdout.write(applyJSONFilters(actions, source, format))
Generate a JSON - to - JSON filter from stdin to stdout
156
15
243,598
def applyJSONFilters(actions, source, format=""):
    """Walk the JSON document and apply each filter action in turn."""
    doc = json.loads(source)
    if 'meta' in doc:
        meta = doc['meta']
    elif doc[0]:  # old API
        meta = doc[0]['unMeta']
    else:
        meta = {}
    altered = doc
    for action in actions:
        altered = walk(altered, action, format, meta)
    return json.dumps(altered)
Walk through JSON structure and apply filters
93
7
243,599
def stringify(x):
    """Walk tree x and return its concatenated string content, dropping all
    formatting."""
    result = []

    def go(key, val, format, meta):
        if key in ['Str', 'MetaString']:
            result.append(val)
        elif key == 'Code':
            result.append(val[1])
        elif key == 'Math':
            result.append(val[1])
        elif key in ['LineBreak', 'SoftBreak', 'Space']:
            # All breaks and spaces collapse to a single blank.
            result.append(" ")

    walk(x, go, "", {})
    return ''.join(result)
Walks the tree x and returns concatenated string content leaving out all formatting .
142
17