idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
16,900
def set_rules(self, rules):
    """
    Set the axe-core audit options from a ``rules`` dict and store them
    as a JSON string on ``self.rules``.

    Exactly one key is honored, in priority order:
        "ignore": list of rule ids to disable,
        "apply": list of rule ids to run exclusively,
        "tags": list of tags to run exclusively.
    An empty/None ``rules`` produces ``"{}"`` (run everything).
    """
    options = {}
    if rules:
        ignored = rules.get("ignore")
        applied = rules.get("apply")
        tags = rules.get("tags")
        if ignored:
            options["rules"] = {rule: {"enabled": False} for rule in ignored}
        elif applied:
            options["runOnly"] = {"type": "rule", "values": applied}
        elif tags:
            options["runOnly"] = {"type": "tag", "values": tags}
    self.rules = json.dumps(options)
Set rules to ignore, or alternatively limit the audit to specific rules or tags (the options are mutually exclusive), when checking for accessibility errors on the page.
159
17
16,901
def customize_ruleset(self, custom_ruleset_file=None):
    """
    Add a set of custom rules to the ruleset.

    The rules are read from ``custom_ruleset_file`` or, failing that, the
    file named by the BOKCHOY_A11Y_CUSTOM_RULES_FILE environment variable.
    They are added to the existing ruleset, replacing any existing rule
    with the same ID. Does nothing when no file is configured.

    Raises:
        A11yAuditConfigError: when the file lacks a "var customRules"
            declaration, which the audit script requires.
    """
    ruleset_path = custom_ruleset_file or os.environ.get(
        "BOKCHOY_A11Y_CUSTOM_RULES_FILE")
    if not ruleset_path:
        return
    with open(ruleset_path, "r") as rules_file:
        contents = rules_file.read()
    if "var customRules" not in contents:
        raise A11yAuditConfigError(
            "Custom rules file must include \"var customRules\"")
    self.custom_rules = contents
Updates the ruleset to include a set of custom rules . These rules will be _added_ to the existing ruleset or replace the existing rule with the same ID .
134
35
16,902
def _check_rules(browser, rules_js, config):
    """
    Run an accessibility audit on the page using the axe-core ruleset.

    Args:
        browser: browser object able to run JavaScript via execute_script.
        rules_js: the axe-core library source to inject into the page.
        config: audit config supplying custom_rules, context and rules
            (the pre-serialized axe options).

    Returns:
        The list under the results' 'violations' key, or None if absent.
        Note that this ruleset doesn't have distinct error/warning levels.
    """
    # Inject axe-core plus any custom rules, then start the audit; the JS
    # callback stashes the JSON results on the window for later polling.
    audit_run_script = dedent(u"""
        {rules_js}
        {custom_rules}
        axe.configure(customRules);
        var callback = function(err, results) {{
          if (err) throw err;
          window.a11yAuditResults = JSON.stringify(results);
          window.console.log(window.a11yAuditResults);
        }}
        axe.run({context}, {options}, callback);
    """).format(
        rules_js=rules_js,
        custom_rules=config.custom_rules,
        context=config.context,
        options=config.rules)

    # Script used to poll for the results stored by the callback above.
    audit_results_script = dedent(u"""
        window.console.log(window.a11yAuditResults);
        return window.a11yAuditResults;
    """)

    browser.execute_script(audit_run_script)

    def audit_results_check_func():
        """
        A method to check that the audit has completed.

        Returns:
            (True, results) if the results are available.
            (False, None) if the results aren't available.
        """
        unicode_results = browser.execute_script(audit_results_script)
        try:
            results = json.loads(unicode_results)
        except (TypeError, ValueError):
            # Results not written yet (None) or partially written (bad JSON).
            results = None
        if results:
            return True, results
        return False, None

    result = Promise(
        audit_results_check_func,
        "Timed out waiting for a11y audit results.",
        timeout=5,
    ).fulfill()

    # audit_results is report of accessibility violations for that session
    # Note that this ruleset doesn't have distinct error/warning levels.
    audit_results = result.get('violations')
    return audit_results
Run an accessibility audit on the page using the axe - core ruleset .
368
15
16,903
def save_source(driver, name):
    """
    Save the rendered HTML of the browser to ``$SAVED_SOURCE_DIR/<name>.html``.

    Best-effort debugging aid: failures to write (e.g. an unwritable
    directory) are logged as warnings rather than raised.
    """
    html = driver.page_source
    target = os.path.join(
        os.environ.get('SAVED_SOURCE_DIR'),
        '{name}.html'.format(name=name))
    try:
        with open(target, 'wb') as handle:
            handle.write(html.encode('utf-8'))
    except Exception:  # pylint: disable=broad-except
        LOGGER.warning(
            u"Could not save the browser page source to {}.".format(target))
Save the rendered HTML of the browser .
135
8
16,904
def save_screenshot(driver, name):
    """
    Save a PNG screenshot of the browser to ``$SCREENSHOT_DIR/<name>.png``.

    Logs a warning and does nothing when the driver cannot take
    screenshots or when SCREENSHOT_DIR is unset; creates the directory
    when it does not exist yet.
    """
    if not hasattr(driver, 'save_screenshot'):
        msg = (
            u"Browser does not support screenshots. "
            u"Could not save screenshot '{name}'"
        ).format(name=name)
        LOGGER.warning(msg)
        return

    screenshot_dir = os.environ.get('SCREENSHOT_DIR')
    if not screenshot_dir:
        LOGGER.warning('The SCREENSHOT_DIR environment variable was not set; not saving a screenshot')
        return
    if not os.path.exists(screenshot_dir):
        os.makedirs(screenshot_dir)

    driver.save_screenshot(os.path.join(screenshot_dir, name + '.png'))
Save a screenshot of the browser .
174
7
16,905
def save_driver_logs(driver, prefix):
    """
    Save the selenium driver logs.

    Log files are written to ``$SELENIUM_DRIVER_LOG_DIR`` as
    ``<prefix>_<log_type>.log`` (or ``<prefix>_geckodriver.log`` for
    firefox, which logs to its own file instead of through Selenium).
    Does nothing, with a warning, when the env var is unset.
    """
    browser_name = os.environ.get('SELENIUM_BROWSER', 'firefox')
    log_dir = os.environ.get('SELENIUM_DRIVER_LOG_DIR')
    if not log_dir:
        LOGGER.warning('The SELENIUM_DRIVER_LOG_DIR environment variable was not set; not saving logs')
        return
    elif not os.path.exists(log_dir):
        os.makedirs(log_dir)
    if browser_name == 'firefox':
        # Firefox doesn't yet provide logs to Selenium, but does log to a separate file
        # https://github.com/mozilla/geckodriver/issues/284
        # https://firefox-source-docs.mozilla.org/testing/geckodriver/geckodriver/TraceLogs.html
        log_path = os.path.join(os.getcwd(), 'geckodriver.log')
        if os.path.exists(log_path):
            dest_path = os.path.join(log_dir, '{}_geckodriver.log'.format(prefix))
            copyfile(log_path, dest_path)
        return
    log_types = driver.log_types
    for log_type in log_types:
        try:
            log = driver.get_log(log_type)
            file_name = os.path.join(log_dir, '{}_{}.log'.format(prefix, log_type))
            with open(file_name, 'w') as output_file:
                # One JSON document per line, one line per log record.
                for line in log:
                    output_file.write("{}{}".format(dumps(line), '\n'))
        except:  # pylint: disable=bare-except
            # Not every driver supports every advertised log type; keep going.
            msg = (
                u"Could not save browser log of type '{log_type}'. "
                u"It may be that the browser does not support it."
            ).format(log_type=log_type)
            LOGGER.warning(msg, exc_info=True)
Save the selenium driver logs .
463
8
16,906
def browser(tags=None, proxy=None, other_caps=None):
    """
    Interpret environment variables to configure Selenium and return a
    live browser instance.

    Chooses SauceLabs, generic remote, or local instantiation based on
    which env-var groups are present, and retries up to 3 times since
    browser start-up is flaky.

    Args:
        tags: labels forwarded to remote-browser capabilities.
        proxy: optional proxy object applied via _proxy_kwargs.
        other_caps: extra desired capabilities for remote drivers; derived
            capabilities win on conflict.
    """
    browser_name = os.environ.get('SELENIUM_BROWSER', 'firefox')

    def browser_check_func():
        """
        Instantiate the browser and return the browser instance
        """
        # See https://openedx.atlassian.net/browse/TE-701
        try:
            # Get the class and kwargs required to instantiate the browser based on
            # whether we are using a local or remote one.
            if _use_remote_browser(SAUCE_ENV_VARS):
                browser_class, browser_args, browser_kwargs = _remote_browser_class(SAUCE_ENV_VARS, tags)
            elif _use_remote_browser(REMOTE_ENV_VARS):
                browser_class, browser_args, browser_kwargs = _remote_browser_class(REMOTE_ENV_VARS, tags)
            else:
                browser_class, browser_args, browser_kwargs = _local_browser_class(browser_name)

            # If we are using a proxy, we need extra kwargs passed on intantiation.
            if proxy:
                browser_kwargs = _proxy_kwargs(browser_name, proxy, browser_kwargs)

            # Load in user given desired caps but override with derived caps from above. This is to retain existing
            # behavior. Only for remote drivers, where various testing services use this info for configuration.
            if browser_class == webdriver.Remote:
                desired_caps = other_caps or {}
                desired_caps.update(browser_kwargs.get('desired_capabilities', {}))
                browser_kwargs['desired_capabilities'] = desired_caps

            return True, browser_class(*browser_args, **browser_kwargs)
        except (socket.error, WebDriverException) as err:
            # Signal the Promise to retry instead of raising.
            msg = str(err)
            LOGGER.debug('Failed to instantiate browser: ' + msg)
            return False, None

    browser_instance = Promise(
        # There are cases where selenium takes 30s to return with a failure, so in order to try 3
        # times, we set a long timeout. If there is a hang on the first try, the timeout will
        # be enforced.
        browser_check_func,
        "Browser is instantiated successfully.",
        try_limit=3,
        timeout=95,
    ).fulfill()

    return browser_instance
Interpret environment variables to configure Selenium . Performs validation logging and sensible defaults .
530
17
16,907
def _firefox_profile():
    """
    Configure the Firefox profile, respecting FIREFOX_PROFILE_ENV_VAR if
    set; otherwise a fresh default profile is created. A set of
    preferences is applied to make runs deterministic and offline-safe.

    Raises:
        BrowserConfigError: when the configured profile directory does not
            exist, has bad permissions, or is otherwise unusable.
    """
    profile_dir = os.environ.get(FIREFOX_PROFILE_ENV_VAR)

    if profile_dir:
        LOGGER.info(u"Using firefox profile: %s", profile_dir)
        try:
            firefox_profile = webdriver.FirefoxProfile(profile_dir)
        except OSError as err:
            # Translate the raw errno into an actionable config error.
            if err.errno == errno.ENOENT:
                raise BrowserConfigError(
                    u"Firefox profile directory {env_var}={profile_dir} does not exist".format(
                        env_var=FIREFOX_PROFILE_ENV_VAR, profile_dir=profile_dir))
            elif err.errno == errno.EACCES:
                raise BrowserConfigError(
                    u"Firefox profile directory {env_var}={profile_dir} has incorrect permissions. It must be \
readable and executable.".format(
                        env_var=FIREFOX_PROFILE_ENV_VAR, profile_dir=profile_dir))
            else:
                # Some other OSError:
                raise BrowserConfigError(
                    u"Problem with firefox profile directory {env_var}={profile_dir}: {msg}".format(
                        env_var=FIREFOX_PROFILE_ENV_VAR, profile_dir=profile_dir, msg=str(err)))
    else:
        LOGGER.info("Using default firefox profile")
        firefox_profile = webdriver.FirefoxProfile()

    # Bypasses the security prompt displayed by the browser when it attempts to
    # access a media device (e.g., a webcam)
    firefox_profile.set_preference('media.navigator.permission.disabled', True)

    # Disable the initial url fetch to 'learn more' from mozilla (so you don't have to
    # be online to run bok-choy on firefox)
    firefox_profile.set_preference('browser.startup.homepage', 'about:blank')
    firefox_profile.set_preference('startup.homepage_welcome_url', 'about:blank')
    firefox_profile.set_preference('startup.homepage_welcome_url.additional', 'about:blank')

    # Disable fetching an updated version of firefox
    firefox_profile.set_preference('app.update.enabled', False)

    # Disable plugin checking
    firefox_profile.set_preference('plugins.hide_infobar_for_outdated_plugin', True)

    # Disable health reporter
    firefox_profile.set_preference('datareporting.healthreport.service.enabled', False)

    # Disable all data upload (Telemetry and FHR)
    firefox_profile.set_preference('datareporting.policy.dataSubmissionEnabled', False)

    # Disable crash reporter
    firefox_profile.set_preference('toolkit.crashreporter.enabled', False)

    # Disable the JSON Viewer
    firefox_profile.set_preference('devtools.jsonview.enabled', False)

    # Grant OS focus to the launched browser so focus-related tests function correctly
    firefox_profile.set_preference('focusmanager.testmode', True)

    # Let callers register extra profile tweaks without editing this module.
    for function in FIREFOX_PROFILE_CUSTOMIZERS:
        function(firefox_profile)
    return firefox_profile
Configure the Firefox profile respecting FIREFOX_PROFILE_PATH if set
741
15
16,908
def _local_browser_class(browser_name):
    """
    Returns class, kwargs, and args needed to instantiate the local browser.

    Firefox and chrome get dedicated option handling (headless mode via
    BOKCHOY_HEADLESS, fake media devices, custom binary/log paths); any
    other supported browser is instantiated with no extra arguments.

    Raises:
        BrowserConfigError: when browser_name is not in BROWSERS.
    """
    # Log name of local browser
    LOGGER.info(u"Using local browser: %s [Default is firefox]", browser_name)

    # Get class of local browser based on name
    browser_class = BROWSERS.get(browser_name)
    headless = os.environ.get('BOKCHOY_HEADLESS', 'false').lower() == 'true'
    if browser_class is None:
        raise BrowserConfigError(
            u"Invalid browser name {name}. Options are: {options}".format(
                name=browser_name, options=", ".join(list(BROWSERS.keys()))))
    else:
        if browser_name == 'firefox':
            # Remove geckodriver log data from previous test cases
            log_path = os.path.join(os.getcwd(), 'geckodriver.log')
            if os.path.exists(log_path):
                os.remove(log_path)

            firefox_options = FirefoxOptions()
            firefox_options.log.level = 'trace'
            if headless:
                firefox_options.headless = True
            browser_args = []
            browser_kwargs = {
                'firefox_profile': _firefox_profile(),
                'options': firefox_options,
            }
            # Optional custom firefox binary and/or log file location.
            firefox_path = os.environ.get('SELENIUM_FIREFOX_PATH')
            firefox_log = os.environ.get('SELENIUM_FIREFOX_LOG')
            if firefox_path and firefox_log:
                browser_kwargs.update({
                    'firefox_binary': FirefoxBinary(
                        firefox_path=firefox_path, log_file=firefox_log)
                })
            elif firefox_path:
                browser_kwargs.update({
                    'firefox_binary': FirefoxBinary(firefox_path=firefox_path)
                })
            elif firefox_log:
                browser_kwargs.update({
                    'firefox_binary': FirefoxBinary(log_file=firefox_log)
                })
        elif browser_name == 'chrome':
            chrome_options = ChromeOptions()
            if headless:
                chrome_options.headless = True

            # Emulate webcam and microphone for testing purposes
            chrome_options.add_argument('--use-fake-device-for-media-stream')

            # Bypasses the security prompt displayed by the browser when it attempts to
            # access a media device (e.g., a webcam)
            chrome_options.add_argument('--use-fake-ui-for-media-stream')

            browser_args = []
            browser_kwargs = {
                'options': chrome_options,
            }
        else:
            browser_args, browser_kwargs = [], {}
        return browser_class, browser_args, browser_kwargs
Returns class kwargs and args needed to instantiate the local browser .
634
15
16,909
def _remote_browser_class(env_vars, tags=None):
    """
    Returns class, kwargs, and args needed to instantiate the remote browser.

    Args:
        env_vars: names of the required environment variables for this
            remote configuration (SauceLabs or generic remote).
        tags: labels folded into the desired capabilities.
    """
    if tags is None:
        tags = []

    # Interpret the environment variables, raising an exception if they're
    # invalid
    envs = _required_envs(env_vars)
    envs.update(_optional_envs())

    # Turn the environment variables into a dictionary of desired capabilities
    caps = _capabilities_dict(envs, tags)

    # 'accessKey' is only present for SauceLabs configurations.
    if 'accessKey' in caps:
        LOGGER.info(u"Using SauceLabs: %s %s %s", caps['platform'], caps['browserName'], caps['version'])
    else:
        LOGGER.info(u"Using Remote Browser: %s", caps['browserName'])

    # Create and return a new Browser
    # We assume that the WebDriver end-point is running locally (e.g. using
    # SauceConnect)
    url = u"http://{0}:{1}/wd/hub".format(envs['SELENIUM_HOST'], envs['SELENIUM_PORT'])

    browser_args = []
    browser_kwargs = {
        'command_executor': url,
        'desired_capabilities': caps,
    }
    if caps['browserName'] == 'firefox':
        browser_kwargs['browser_profile'] = _firefox_profile()

    return webdriver.Remote, browser_args, browser_kwargs
Returns class kwargs and args needed to instantiate the remote browser .
323
15
16,910
def _proxy_kwargs ( browser_name , proxy , browser_kwargs = { } ) : # pylint: disable=dangerous-default-value proxy_dict = { "httpProxy" : proxy . proxy , "proxyType" : 'manual' , } if browser_name == 'firefox' and 'desired_capabilities' not in browser_kwargs : # This one works for firefox locally wd_proxy = webdriver . common . proxy . Proxy ( proxy_dict ) browser_kwargs [ 'proxy' ] = wd_proxy else : # This one works with chrome, both locally and remote # This one works with firefox remote, but not locally if 'desired_capabilities' not in browser_kwargs : browser_kwargs [ 'desired_capabilities' ] = { } browser_kwargs [ 'desired_capabilities' ] [ 'proxy' ] = proxy_dict return browser_kwargs
Determines the kwargs needed to set up a proxy based on the browser type .
207
19
16,911
def _required_envs(env_vars):
    """
    Parse environment variables for required values.

    Returns a dict mapping each name in ``env_vars`` to its value.

    Raises:
        BrowserConfigError: when any variable is unset, or when
            SELENIUM_BROWSER names an unsupported browser.
    """
    envs = {key: os.environ.get(key) for key in env_vars}

    # Report every missing variable in one message rather than one at a time.
    missing = [key for key, val in list(envs.items()) if val is None]
    if missing:
        raise BrowserConfigError(
            u"These environment variables must be set: " + u", ".join(missing))

    # Check that we support this browser
    selected = envs['SELENIUM_BROWSER']
    if selected not in BROWSERS:
        raise BrowserConfigError(u"Unsuppported browser: {0}".format(selected))

    return envs
Parse environment variables for required values, raising a BrowserConfigError if they are not found.
165
18
16,912
def _optional_envs():
    """
    Collect optional environment variables that are actually set.

    Raises:
        BrowserConfigError: when exactly one of the Jenkins variables
            (JOB_NAME / BUILD_NUMBER) is present — both are needed to
            link test output to a Jenkins build.
    """
    envs = {
        key: os.environ.get(key)
        for key in OPTIONAL_ENV_VARS
        if key in os.environ
    }

    # If we're using Jenkins, check that we have all the required info
    has_job = 'JOB_NAME' in envs
    has_build = 'BUILD_NUMBER' in envs
    if has_job and not has_build:
        raise BrowserConfigError("Missing BUILD_NUMBER environment var")
    if has_build and not has_job:
        raise BrowserConfigError("Missing JOB_NAME environment var")

    return envs
Parse environment variables for optional values raising a BrowserConfig error if they are insufficiently specified .
139
19
16,913
def _capabilities_dict(envs, tags):
    """
    Convert the dictionary of environment variables to a dictionary of
    desired capabilities to send to the Remote WebDriver.
    """
    capabilities = {
        'browserName': envs['SELENIUM_BROWSER'],
        'acceptInsecureCerts': bool(envs.get('SELENIUM_INSECURE_CERTS', False)),
        'video-upload-on-pass': False,
        'sauce-advisor': False,
        'capture-html': True,
        'record-screenshots': True,
        'max-duration': 600,
        'public': 'public restricted',
        'tags': tags,
    }

    # Add SauceLabs specific environment vars if they are set.
    if _use_remote_browser(SAUCE_ENV_VARS):
        capabilities.update({
            'platform': envs['SELENIUM_PLATFORM'],
            'version': envs['SELENIUM_VERSION'],
            'username': envs['SAUCE_USER_NAME'],
            'accessKey': envs['SAUCE_API_KEY'],
        })

    # Optional: Add in Jenkins-specific environment variables
    # to link Sauce output with the Jenkins job
    if 'JOB_NAME' in envs:
        capabilities.update({
            'build': envs['BUILD_NUMBER'],
            'name': envs['JOB_NAME'],
        })

    return capabilities
Convert the dictionary of environment variables to a dictionary of desired capabilities to send to the Remote WebDriver .
340
21
16,914
def replace(self, **kwargs):
    """
    Return a copy of this Query, with the attributes given as keyword
    arguments replaced by the supplied values.

    Raises:
        TypeError: when a keyword does not name an existing attribute.
    """
    clone = copy(self)
    # Give the clone its own transforms list so later appends on the clone
    # don't mutate the original's pipeline.
    clone.transforms = list(clone.transforms)
    for attr_name, attr_value in kwargs.items():
        if not hasattr(clone, attr_name):
            raise TypeError(
                u'replace() got an unexpected keyword argument {!r}'.format(attr_name))
        setattr(clone, attr_name, attr_value)
    return clone
Return a copy of this Query but with attributes specified as keyword arguments replaced by the keyword values .
86
19
16,915
def transform(self, transform, desc=None):
    """
    Create a copy of this query with ``transform`` appended to the
    transform pipeline; ``desc`` (defaulting to the transform's name)
    is pushed onto the description stack for debugging.
    """
    label = desc
    if label is None:
        label = u'transform({})'.format(getattr(transform, '__name__', ''))
    return self.replace(
        transforms=self.transforms + [transform],
        desc_stack=self.desc_stack + [label],
    )
Create a copy of this query transformed by transform .
72
10
16,916
def map(self, map_fn, desc=None):
    """
    Return a copy of this query whose values are lazily mapped through
    ``map_fn``; the description defaults to the function's name.
    """
    if desc is None:
        desc = getattr(map_fn, '__name__', '')
    desc = u'map({})'.format(desc)

    def _mapper(xs):
        return (map_fn(x) for x in xs)

    return self.transform(_mapper, desc=desc)
Return a copy of this query with the values mapped through map_fn .
76
15
16,917
def filter(self, filter_fn=None, desc=None, **kwargs):
    """
    Return a copy of this query keeping only values that satisfy either
    ``filter_fn`` or the attribute-equality filters given as kwargs.
    Exactly one of the two filtering styles must be supplied.

    Raises:
        TypeError: when both or neither filtering style is given.
    """
    if filter_fn is not None and kwargs:
        raise TypeError('Must supply either a filter_fn or attribute filter parameters to filter(), but not both.')
    if filter_fn is None and not kwargs:
        raise TypeError('Must supply one of filter_fn or one or more attribute filter parameters to filter().')

    if desc is None:
        if filter_fn is not None:
            desc = getattr(filter_fn, '__name__', '')
        else:
            # Guard clauses above ensure kwargs is non-empty here.
            desc = u", ".join([u"{}={!r}".format(key, value) for key, value in kwargs.items()])
    desc = u"filter({})".format(desc)

    if kwargs:
        def filter_fn(elem):  # pylint: disable=function-redefined, missing-docstring
            return all(
                getattr(elem, filter_key) == filter_value
                for filter_key, filter_value in kwargs.items()
            )

    def _keep_matching(xs):
        return (x for x in xs if filter_fn(x))

    return self.transform(_keep_matching, desc=desc)
Return a copy of this query with some values removed .
274
11
16,918
def _execute ( self ) : data = self . seed_fn ( ) for transform in self . transforms : data = transform ( data ) return list ( data )
Run the query generating data from the seed_fn and performing transforms on the results .
34
17
16,919
def execute(self, try_limit=5, try_interval=0.5, timeout=30):
    """
    Execute this query, retrying per the supplied parameters until it
    completes without raising, and return the results.
    """
    promise = Promise(
        no_error(self._execute),
        u"Executing {!r}".format(self),
        try_limit=try_limit,
        try_interval=try_interval,
        timeout=timeout,
    )
    return promise.fulfill()
Execute this query retrying based on the supplied parameters .
79
12
16,920
def first(self):
    """
    Return a Query that selects only the first element of this Query.
    If no elements are available, the resulting query has no results.
    """
    def _transform(xs):  # pylint: disable=missing-docstring, invalid-name
        iterator = iter(xs)
        try:
            return [six.next(iterator)]
        except StopIteration:
            return []

    return self.transform(_transform, 'first')
Return a Query that selects only the first element of this Query . If no elements are available returns a query with no results .
64
25
16,921
def attrs(self, attribute_name):
    """
    Retrieve the HTML attribute ``attribute_name`` from every element
    matched by the query and return the list of values.
    """
    label = u'attrs({!r})'.format(attribute_name)

    def _read_attr(el):
        return el.get_attribute(attribute_name)

    return self.map(_read_attr, label).results
Retrieve HTML attribute values from the elements matched by the query .
52
13
16,922
def selected(self):
    """
    Check whether every matched element is selected; False when the
    query matched nothing at all.
    """
    states = self.map(lambda el: el.is_selected(), 'selected').results
    return all(states) if states else False
Check whether all the matched elements are selected .
45
9
16,923
def visible(self):
    """
    Check whether every matched element is displayed; False when the
    query matched nothing at all.
    """
    states = self.map(lambda el: el.is_displayed(), 'visible').results
    return all(states) if states else False
Check whether all matched elements are visible .
46
8
16,924
def fill(self, text):
    """
    Set the text value of each matched element to ``text`` by clearing
    it and then sending the keystrokes.
    """
    def _clear_and_type(elem):  # pylint: disable=missing-docstring
        elem.clear()
        elem.send_keys(text)

    self.map(_clear_and_type, u'fill({!r})'.format(text)).execute()
Set the text value of each matched element to text .
67
11
16,925
def url_converter(self, *args, **kwargs):
    """
    Return the custom URL converter for the given file name.

    Wraps the upstream converter so that a ValueError (raised when a
    referenced static file cannot be found because the upstream converter
    stripped 'static/' from the path) falls back to returning the matched
    text unchanged instead of propagating.
    """
    upstream_converter = super(PatchedManifestStaticFilesStorage, self).url_converter(*args, **kwargs)

    def converter(matchobj):
        try:
            # BUGFIX: the upstream result was previously computed but never
            # returned, so the converter yielded None on the success path.
            return upstream_converter(matchobj)
        except ValueError:
            # e.g. a static file 'static/media/logo.6a30f15f.svg' could not be found
            # because the upstream converter stripped 'static/' from the path
            matched, url = matchobj.groups()
            return matched

    return converter
Return the custom URL converter for the given file name .
132
11
16,926
def order_by_on_list(objects, order_field, is_desc=False):
    """
    Utility function to sort ``objects`` in place, django-style, even for
    non-queryset collections.

    ``order_field`` is either a key callable or an attribute-path string;
    when it is a path, None values sort as MIN so they group at one end.
    """
    if callable(order_field):
        objects.sort(key=order_field, reverse=is_desc)
        return

    def order_key(item):
        value = getattr_path(item, order_field)
        return MIN if value is None else value

    objects.sort(key=order_key, reverse=is_desc)
Utility function to sort objects django - style even for non - query set collections
94
17
16,927
def render_table(
        request,
        table,
        links=None,
        context=None,
        template='tri_table/list.html',
        blank_on_empty=False,
        paginate_by=40,  # pragma: no mutate
        page=None,
        paginator=None,
        show_hits=False,
        hit_label='Items',
        post_bulk_edit=lambda table, queryset, updates: None):
    """
    Render a table. This automatically handles pagination, sorting,
    filtering and bulk operations.

    Args:
        request: the django request.
        table: a Table instance, or a Namespace factory producing one.
        links / context / template / paginate_by / page / paginator /
            show_hits / hit_label: forwarded to table_context.
        blank_on_empty: return '' instead of rendering when there is no data.
        post_bulk_edit: hook called after a successful bulk update.
    """
    if not context:
        context = {}
    # Allow a lazily-configured table: a Namespace is called to build it.
    if isinstance(table, Namespace):
        table = table()
    assert isinstance(table, Table), table
    table.request = request

    should_return, dispatch_result = handle_dispatch(request=request, obj=table)
    if should_return:
        return dispatch_result

    context['bulk_form'] = table.bulk_form
    context['query_form'] = table.query_form
    context['tri_query_error'] = table.query_error

    # A POSTed, valid bulk form applies the non-blank field values to every
    # selected row, then redirects back to the referring page.
    if table.bulk_form and request.method == 'POST':
        if table.bulk_form.is_valid():
            queryset = table.bulk_queryset()
            updates = {
                field.name: field.value
                for field in table.bulk_form.fields
                if field.value is not None and field.value != '' and field.attr is not None
            }
            queryset.update(**updates)
            post_bulk_edit(table=table, queryset=queryset, updates=updates)
            return HttpResponseRedirect(request.META['HTTP_REFERER'])

    table.context = table_context(
        request,
        table=table,
        links=links,
        paginate_by=paginate_by,
        page=page,
        extra_context=context,
        paginator=paginator,
        show_hits=show_hits,
        hit_label=hit_label,
    )

    if not table.data and blank_on_empty:
        return ''

    # An invalid filter query suppresses the data and shows a placeholder.
    if table.query_form and not table.query_form.is_valid():
        table.data = None
        table.context['invalid_form_message'] = mark_safe('<i class="fa fa-meh-o fa-5x" aria-hidden="true"></i>')

    return render_template(request, template, table.context)
Render a table . This automatically handles pagination sorting filtering and bulk operations .
505
15
16,928
def generate_duid(mac):
    """
    Build a DUID of 10 hex numbers from a MAC address: a '00:' prefix,
    the MAC's last three octets, then the full MAC.

    Raises:
        ValueError: when mac is empty or not a string.
    """
    if not (mac and isinstance(mac, six.string_types)):
        raise ValueError("Invalid argument was passed")
    return "00:" + mac[9:] + ":" + mac
The generated DUID consists of 10 hex numbers.
54
9
16,929
def try_value_to_bool(value, strict_mode=True):
    """
    Try to convert ``value`` into a boolean.

    In strict mode only the exact strings 'True'/'False' convert; in
    non-strict mode a case-insensitive match against common truthy/falsy
    words ('true'/'on'/'yes', 'false'/'off'/'no') is used. The original
    value is returned unchanged when no match is found.
    """
    if strict_mode:
        candidate = value
        truthy = ('True',)
        falsy = ('False',)
    else:
        candidate = str(value).lower()
        truthy = ('true', 'on', 'yes')
        falsy = ('false', 'off', 'no')

    if candidate in truthy:
        return True
    if candidate in falsy:
        return False
    return value
Tries to convert value into boolean .
113
8
16,930
def create_network(self, net_view_name, cidr, nameservers=None,
                   members=None, gateway_ip=None, dhcp_trel_ip=None,
                   network_extattrs=None):
    """
    Create a NIOS Network, preparing DHCP options for nameservers, the
    gateway router, and (IPv4 only) the DHCP server identifier used for
    relays.
    """
    is_ipv4 = ib_utils.determine_ip_version(cidr) == 4

    options = []
    if nameservers:
        options.append(obj.DhcpOption(name='domain-name-servers',
                                      value=",".join(nameservers)))
    if is_ipv4 and gateway_ip:
        options.append(obj.DhcpOption(name='routers', value=gateway_ip))
    if is_ipv4 and dhcp_trel_ip:
        # Option 54: the server identifier clients should respond to.
        options.append(obj.DhcpOption(name='dhcp-server-identifier',
                                      num=54,
                                      value=dhcp_trel_ip))

    return obj.Network.create(self.connector,
                              network_view=net_view_name,
                              cidr=cidr,
                              members=members,
                              options=options,
                              extattrs=network_extattrs,
                              check_if_exists=False)
Create NIOS Network and prepare DHCP options .
254
9
16,931
def create_ip_range(self, network_view, start_ip, end_ip, network,
                    disable, range_extattrs):
    """
    Create an IPRange in NIOS; fails if one already exists, since
    existence checking is disabled and NIOS reports the conflict.
    """
    range_kwargs = dict(
        network_view=network_view,
        start_addr=start_ip,
        end_addr=end_ip,
        cidr=network,
        disable=disable,
        extattrs=range_extattrs,
        check_if_exists=False,
    )
    return obj.IPRange.create(self.connector, **range_kwargs)
Creates IPRange or fails if already exists .
98
11
16,932
def _parse_options(self, options):
    """
    Copy needed options to self.

    Each known attribute is resolved, in priority order, from ``options``
    (either a dict or an object with attributes), then from
    self.DEFAULT_OPTIONS. Also derives ``wapi_url`` and
    ``cloud_api_enabled`` from the host and WAPI version.

    Raises:
        ib_ex.InfobloxConfigException: when an option is undefined
            everywhere, or when a required credential option is blank.
    """
    attributes = ('host', 'wapi_version', 'username', 'password',
                  'ssl_verify', 'http_request_timeout', 'max_retries',
                  'http_pool_connections', 'http_pool_maxsize',
                  'silent_ssl_warnings', 'log_api_calls_as_info',
                  'max_results', 'paging')
    for attr in attributes:
        # dict lookup first, then attribute access, then defaults.
        if isinstance(options, dict) and attr in options:
            setattr(self, attr, options[attr])
        elif hasattr(options, attr):
            value = getattr(options, attr)
            setattr(self, attr, value)
        elif attr in self.DEFAULT_OPTIONS:
            setattr(self, attr, self.DEFAULT_OPTIONS[attr])
        else:
            msg = "WAPI config error. Option %s is not defined" % attr
            raise ib_ex.InfobloxConfigException(msg=msg)

    # Connection credentials must be non-blank, not merely defined.
    for attr in ('host', 'username', 'password'):
        if not getattr(self, attr):
            msg = "WAPI config error. Option %s can not be blank" % attr
            raise ib_ex.InfobloxConfigException(msg=msg)

    self.wapi_url = "https://%s/wapi/v%s/" % (self.host, self.wapi_version)
    self.cloud_api_enabled = self.is_cloud_wapi(self.wapi_version)
Copy needed options to self
362
5
16,933
def _parse_reply(request):
    """
    Try to parse the reply from NIOS as JSON.

    Raises:
        ib_ex.InfobloxConnectionError: when the body is not valid JSON;
            the raw content is passed along as the reason.
    """
    try:
        return jsonutils.loads(request.content)
    except ValueError:
        raise ib_ex.InfobloxConnectionError(reason=request.content)
Tries to parse reply from NIOS .
42
9
16,934
def get_object ( self , obj_type , payload = None , return_fields = None , extattrs = None , force_proxy = False , max_results = None , paging = False ) : self . _validate_obj_type_or_die ( obj_type , obj_type_expected = False ) # max_results passed to get_object has priority over # one defined as connector option if max_results is None and self . max_results : max_results = self . max_results if paging is False and self . paging : paging = self . paging query_params = self . _build_query_params ( payload = payload , return_fields = return_fields , max_results = max_results , paging = paging ) # Clear proxy flag if wapi version is too old (non-cloud) proxy_flag = self . cloud_api_enabled and force_proxy ib_object = self . _handle_get_object ( obj_type , query_params , extattrs , proxy_flag ) if ib_object : return ib_object # Do second get call with force_proxy if not done yet if self . cloud_api_enabled and not force_proxy : ib_object = self . _handle_get_object ( obj_type , query_params , extattrs , proxy_flag = True ) if ib_object : return ib_object return None
Retrieve a list of Infoblox objects of type obj_type
306
14
16,935
def create_object(self, obj_type, payload, return_fields=None):
    """
    Create an Infoblox object of type obj_type via HTTP POST.

    Raises:
        ib_ex.InfobloxMemberAlreadyAssigned: when the reply says the
            member is assigned to another network view.
        ib_ex.InfobloxCannotCreateObject: for any other non-201 response.
    """
    self._validate_obj_type_or_die(obj_type)

    query_params = self._build_query_params(return_fields=return_fields)

    url = self._construct_url(obj_type, query_params)
    opts = self._get_request_options(data=payload)
    self._log_request('post', url, opts)
    if (self.session.cookies):
        # the first 'get' or 'post' action will generate a cookie
        # after that, we don't need to re-authenticate
        self.session.auth = None
    r = self.session.post(url, **opts)
    self._validate_authorized(r)

    if r.status_code != requests.codes.CREATED:
        response = utils.safe_json_load(r.content)
        # NIOS signals this specific conflict in the reply text.
        already_assigned = 'is assigned to another network view'
        if response and already_assigned in response.get('text'):
            exception = ib_ex.InfobloxMemberAlreadyAssigned
        else:
            exception = ib_ex.InfobloxCannotCreateObject
        raise exception(
            response=response,
            obj_type=obj_type,
            content=r.content,
            args=payload,
            code=r.status_code)

    return self._parse_reply(r)
Create an Infoblox object of type obj_type
307
11
16,936
def update_object(self, ref, payload, return_fields=None):
    """
    Update an Infoblox object by reference via HTTP PUT.

    Raises:
        ib_ex.InfobloxCannotUpdateObject: on any non-200 response (after
            first checking general service availability).
    """
    query_params = self._build_query_params(return_fields=return_fields)
    opts = self._get_request_options(data=payload)
    url = self._construct_url(ref, query_params)
    self._log_request('put', url, opts)

    reply = self.session.put(url, **opts)
    self._validate_authorized(reply)

    if reply.status_code != requests.codes.ok:
        self._check_service_availability('update', reply, ref)
        raise ib_ex.InfobloxCannotUpdateObject(
            response=jsonutils.loads(reply.content),
            ref=ref,
            content=reply.content,
            code=reply.status_code)

    return self._parse_reply(reply)
Update an Infoblox object
194
6
16,937
def delete_object(self, ref, delete_arguments=None):
    """
    Remove an Infoblox object by reference via HTTP DELETE.

    Raises:
        ib_ex.InfobloxCannotDeleteObject: on any non-200 response (after
            first checking general service availability).
    """
    opts = self._get_request_options()
    # Non-dict delete_arguments (including None) means "no query params".
    if not isinstance(delete_arguments, dict):
        delete_arguments = {}
    url = self._construct_url(ref, query_params=delete_arguments)
    self._log_request('delete', url, opts)

    reply = self.session.delete(url, **opts)
    self._validate_authorized(reply)

    if reply.status_code != requests.codes.ok:
        self._check_service_availability('delete', reply, ref)
        raise ib_ex.InfobloxCannotDeleteObject(
            response=jsonutils.loads(reply.content),
            ref=ref,
            content=reply.content,
            code=reply.status_code)

    return self._parse_reply(reply)
Remove an Infoblox object
194
6
16,938
def _remap_fields ( cls , kwargs ) : mapped = { } for key in kwargs : if key in cls . _remap : mapped [ cls . _remap [ key ] ] = kwargs [ key ] else : mapped [ key ] = kwargs [ key ] return mapped
Map fields from kwargs into dict acceptable by NIOS
70
12
16,939
def from_dict(cls, eas_from_nios):
    """
    Convert extensible attributes from the NIOS reply into an instance,
    coercing boolean-looking values along the way. Returns None for an
    empty or absent mapping.
    """
    if not eas_from_nios:
        return
    converted = {
        name: cls._process_value(ib_utils.try_value_to_bool,
                                 eas_from_nios[name]['value'])
        for name in eas_from_nios
    }
    return cls(converted)
Converts extensible attributes from the NIOS reply .
79
11
16,940
def to_dict(self):
    """
    Convert extensible attributes into the format suitable for NIOS,
    dropping entries whose value is None, '' or [].
    """
    def _is_set(value):
        return not (value is None or value == "" or value == [])

    return {
        name: {'value': self._process_value(str, value)}
        for name, value in self._ea_dict.items()
        if _is_set(value)
    }
Converts extensible attributes into the format suitable for NIOS .
61
13
16,941
def _process_value ( func , value ) : if isinstance ( value , ( list , tuple ) ) : return [ func ( item ) for item in value ] return func ( value )
Applies processing method for value or each element in it .
40
12
16,942
def from_dict(cls, connector, ip_dict):
    """
    Build an instance from ``ip_dict``, first converting any fields that
    have a registered processor (global processors overridden by custom
    ones) into sub-objects.
    """
    processors = cls._global_field_processing.copy()
    processors.update(cls._custom_field_processing)
    # Process fields that require building themselves as objects
    for field_name, build in processors.items():
        if field_name in ip_dict:
            ip_dict[field_name] = build(ip_dict[field_name])
    return cls(connector, **ip_dict)
Build dict fields as SubObjects if needed .
96
10
16,943
def field_to_dict(self, field):
    """
    Read the named field's value and convert it via value_to_dict,
    element-wise when the value is a list or tuple.
    """
    raw = getattr(self, field)
    if not isinstance(raw, (list, tuple)):
        return self.value_to_dict(raw)
    return [self.value_to_dict(item) for item in raw]
Read field value and converts to dict if possible
62
9
16,944
def to_dict(self, search_fields=None):
    """Serialize object fields to a dict, skipping None-valued fields.

    *search_fields* selects which field set to serialize:
      - 'update': only the search-for-update fields
      - 'all': all searchable fields
      - 'exclude': all fields minus search-for-update ones, keeping
        fields that are themselves updateable search fields
      - anything else: every field
    """
    if search_fields == 'update':
        fields = self._search_for_update_fields
    elif search_fields == 'all':
        fields = self._all_searchable_fields
    elif search_fields == 'exclude':
        # exclude search fields for update actions,
        # but include updateable_search_fields
        fields = [f for f in self._fields
                  if f in self._updateable_search_fields
                  or f not in self._search_for_update_fields]
    else:
        fields = self._fields
    result = {}
    for field in fields:
        if getattr(self, field, None) is not None:
            result[field] = self.field_to_dict(field)
    return result
Builds dict without None object fields
160
7
16,945
def fetch(self, only_ref=False):
    """Fetch the object from NIOS, by _ref when known, else by search fields.

    Args:
        only_ref: when searching (no _ref yet), ask NIOS for no extra
            return fields so only the reference gets populated.

    Returns:
        True if a matching object was found and this instance was
        updated from it, False otherwise.
    """
    if self.ref:
        reply = self.connector.get_object(
            self.ref, return_fields=self.return_fields)
        if reply:
            self.update_from_dict(reply)
            return True
    search_dict = self.to_dict(search_fields='update')
    return_fields = [] if only_ref else self.return_fields
    reply = self.connector.get_object(self.infoblox_type,
                                      search_dict,
                                      return_fields=return_fields)
    if reply:
        # A search may match several objects; the first one wins.
        self.update_from_dict(reply[0], only_ref=only_ref)
        return True
    return False
Fetch object from NIOS by _ref or search fields
148
12
16,946
def _ip_setter(self, ipaddr_name, ipaddrs_name, ips):
    """Normalize *ips* into the single-ip and ip-list attributes.

    Accepts a plain string, an IP object, a non-empty list/tuple of IP
    objects, or None.  NOTE(review): an empty list/tuple would raise
    IndexError on ips[0] before reaching the ValueError branch —
    confirm callers never pass one.
    """
    if isinstance(ips, six.string_types):
        setattr(self, ipaddr_name, ips)
    elif isinstance(ips, (list, tuple)) and isinstance(ips[0], IP):
        setattr(self, ipaddr_name, ips[0].ip)
        setattr(self, ipaddrs_name, ips)
    elif isinstance(ips, IP):
        setattr(self, ipaddr_name, ips.ip)
        setattr(self, ipaddrs_name, [ips])
    elif ips is None:
        setattr(self, ipaddr_name, None)
        setattr(self, ipaddrs_name, None)
    else:
        raise ValueError(
            "Invalid format of ip passed in: %s."
            "Should be string or list of NIOS IP objects." % ips)
Setter for ip fields
217
5
16,947
def mac(self, mac):
    """Set the mac field and derive the duid from it.

    A falsy mac leaves an existing duid untouched; if no duid exists
    yet it is initialized to None.
    """
    self._mac = mac
    if mac:
        self.duid = ib_utils.generate_duid(mac)
    elif not hasattr(self, 'duid'):
        self.duid = None
Set mac and duid fields
53
6
16,948
def render_property(property):
    """Render a property for a bosh manifest according to its type.

    Typed properties listed in PROPERTY_FIELDS expand into a dict of
    '(( .properties.<name>.<field> ))' placeholders; reference
    properties render their default; everything else renders a single
    '(( .properties.<name>.value ))' placeholder.
    See https://docs.pivotal.io/partners/product-template-reference.html
    for the list of typed properties.
    """
    name = property['name']
    if 'type' in property and property['type'] in PROPERTY_FIELDS:
        placeholders = {}
        for field in PROPERTY_FIELDS[property['type']]:
            # A tuple entry maps an output key to a differently-named field.
            if type(field) is tuple:
                key, subfield = field
            else:
                key, subfield = field, field
            placeholders[key] = '(( .properties.{}.{} ))'.format(name, subfield)
        return {name: placeholders}
    if property.get('is_reference', False):
        return {name: property['default']}
    return {name: '(( .properties.{}.value ))'.format(name)}
Render a property for bosh manifest according to its type .
268
12
16,949
def match(obj, matchers=TYPES):
    """Return the first matcher that recognizes *obj*'s header bytes, or None."""
    buf = get_bytes(obj)
    return next((candidate for candidate in matchers if candidate.match(buf)), None)
Matches the given input against the available file type matchers .
42
14
16,950
def signature(array):
    """Return the first _NUM_SIGNATURE_BYTES (262) bytes of *array*.

    Slicing clamps to the sequence length, so shorter inputs are
    returned whole — same result as the explicit length check.
    """
    return array[:min(len(array), _NUM_SIGNATURE_BYTES)]
Returns the first 262 bytes of the given bytearray as part of the file header signature .
43
20
16,951
def get_bytes(obj):
    """Infer the input type and return its first 262 bytes.

    *obj* may be a readable stream, a bytes/bytearray/memoryview
    buffer, or a str (treated as a file path and read from disk).
    The exact type() checks are deliberate: subclasses are rejected.
    """
    try:
        obj = obj.read(_NUM_SIGNATURE_BYTES)
    except AttributeError:
        # duck-typing as readable failed - we'll try the other options
        pass
    kind = type(obj)
    if kind is bytearray:
        return signature(obj)
    if kind is str:
        return get_signature_bytes(obj)
    if kind is bytes:
        return signature(obj)
    if kind is memoryview:
        return signature(obj).tolist()
    raise TypeError('Unsupported type as file input: %s' % kind)
Infers the input type and reads the first 262 bytes returning a sliced bytearray .
125
19
16,952
def get_type(mime=None, ext=None):
    """Return the file type instance matching *mime* or *ext*, or None.

    Fix: compare with ``==`` instead of ``is``.  Identity comparison on
    strings only succeeds for interned literals, so extension/MIME
    strings built at runtime (e.g. sliced from a filename) would never
    match even when equal.
    """
    for kind in types:
        if kind.extension == ext or kind.mime == mime:
            return kind
    return None
Returns the file type instance searching by MIME type or file extension .
39
14
16,953
def open(self, encoding=None):
    """Open self._filename with the appropriate call.

    Gzipped files (per IS_GZIPPED_FILE) are opened binary via gzip;
    otherwise the file is opened for text reading using, in order of
    preference, the explicit *encoding*, the instance default
    self._encoding, or the platform default — always replacing
    undecodable bytes.  On IOError the failure is logged, the watcher
    is closed, and None is returned.
    """
    try:
        if IS_GZIPPED_FILE.search(self._filename):
            _file = gzip.open(self._filename, 'rb')
        else:
            if encoding:
                _file = io.open(self._filename, 'r', encoding=encoding, errors='replace')
            elif self._encoding:
                _file = io.open(self._filename, 'r', encoding=self._encoding, errors='replace')
            else:
                _file = io.open(self._filename, 'r', errors='replace')
    except IOError, e:  # Python 2 except syntax (module predates py3)
        self._log_warning(str(e))
        _file = None
        self.close()
    return _file
Opens the file with the appropriate call
170
8
16,954
def close(self):
    """Close the open file handle and flush pending state.

    Persists the current position to the sincedb and, if a partial
    multi-line event is still buffered, emits it through the callback.
    Idempotent: does nothing when already inactive.
    """
    if not self.active:
        return
    self.active = False
    if self._file:
        self._file.close()
    self._sincedb_update_position(force_update=True)
    if self._current_event:
        event = '\n'.join(self._current_event)
        self._current_event.clear()
        self._callback_wrapper([event])
Closes all currently open file pointers
94
7
16,955
def _ensure_file_is_good(self, current_time):
    """Every stat-interval seconds, verify we are still tailing the right file.

    Detects removal (close), rotation (file id changed), truncation
    (our offset is past EOF — optionally ignored for zero-size files),
    and on platforms flagged by REOPEN_FILES reopens the file to defeat
    a cached EOF.
    """
    if self._last_file_mapping_update and current_time - self._last_file_mapping_update <= self._stat_interval:
        return
    self._last_file_mapping_update = time.time()
    try:
        st = os.stat(self._filename)
    except EnvironmentError, err:  # Python 2 except syntax
        if err.errno == errno.ENOENT:
            self._log_info('file removed')
            self.close()
            return
        raise
    fid = self.get_file_id(st)
    if fid != self._fid:
        self._log_info('file rotated')
        self.close()
    elif self._file.tell() > st.st_size:
        if st.st_size == 0 and self._ignore_truncate:
            self._logger.info("[{0}] - file size is 0 {1}. ".format(fid, self._filename) +
                              "If you use another tool (i.e. logrotate) to truncate " +
                              "the file, your application may continue to write to " +
                              "the offset it last wrote later. In such a case, we'd " +
                              "better do nothing here")
            return
        self._log_info('file truncated')
        self._update_file(seek_to_end=False)
    elif REOPEN_FILES:
        self._log_debug('file reloaded (non-linux)')
        position = self._file.tell()
        self._update_file(seek_to_end=False)
        if self.active:
            # Restore our read offset in the freshly reopened handle.
            self._file.seek(position, os.SEEK_SET)
Every N seconds ensures that the file we are tailing is the file we expect to be tailing
388
20
16,956
def _run_pass(self):
    """Read lines from the tailed file and fire the callback for them.

    Loops until a read yields no complete lines.  Handles stale NFS
    handles (ESTALE) by deactivating, flushes a lingering partial
    multi-line event after one second of inactivity, merges multi-line
    events when configured, and keeps the sincedb position up to date.
    """
    while True:
        try:
            data = self._file.read(4096)
        except IOError, e:  # Python 2 except syntax
            if e.errno == errno.ESTALE:
                self.active = False
                return False
        lines = self._buffer_extract(data)
        if not lines:
            # Before returning, check if an event (maybe partial) is waiting for too long.
            if self._current_event and time.time() - self._last_activity > 1:
                event = '\n'.join(self._current_event)
                self._current_event.clear()
                self._callback_wrapper([event])
            break
        self._last_activity = time.time()
        if self._multiline_regex_after or self._multiline_regex_before:
            # Multiline is enabled for this file.
            events = multiline_merge(
                lines,
                self._current_event,
                self._multiline_regex_after,
                self._multiline_regex_before)
        else:
            events = lines
        if events:
            self._callback_wrapper(events)
        if self._sincedb_path:
            current_line_count = len(lines)
            self._sincedb_update_position(lines=current_line_count)
    self._sincedb_update_position()
Read lines from a file and performs a callback against them
302
11
16,957
def _sincedb_init ( self ) : if not self . _sincedb_path : return if not os . path . exists ( self . _sincedb_path ) : self . _log_debug ( 'initializing sincedb sqlite schema' ) conn = sqlite3 . connect ( self . _sincedb_path , isolation_level = None ) conn . execute ( """ create table sincedb ( fid text primary key, filename text, position integer default 1 ); """ ) conn . close ( )
Initializes the sincedb schema in an sqlite db
113
12
16,958
def _sincedb_update_position(self, lines=0, force_update=False):
    """Persist the current line count for this file to the sincedb.

    Writes are throttled to one per sincedb_write_interval and skipped
    when the count has not changed, unless *force_update* is set.

    Returns:
        True when the db was actually written, False otherwise.
    """
    if not self._sincedb_path:
        return False
    self._line_count = self._line_count + lines
    old_count = self._line_count_sincedb
    lines = self._line_count
    current_time = int(time.time())
    if not force_update:
        if self._last_sincedb_write and current_time - self._last_sincedb_write <= self._sincedb_write_interval:
            return False
        if old_count == lines:
            return False
    self._sincedb_init()
    self._last_sincedb_write = current_time
    self._log_debug('updating sincedb to {0}'.format(lines))
    conn = sqlite3.connect(self._sincedb_path, isolation_level=None)
    cursor = conn.cursor()
    # Upsert the row for this file id, then set its position.
    query = 'insert or replace into sincedb (fid, filename) values (:fid, :filename);'
    cursor.execute(query, {
        'fid': self._fid,
        'filename': self._filename
    })
    query = 'update sincedb set position = :position where fid = :fid and filename = :filename'
    cursor.execute(query, {
        'fid': self._fid,
        'filename': self._filename,
        'position': lines,
    })
    conn.close()
    self._line_count_sincedb = lines
    return True
Retrieves the starting position from the sincedb sql db for a given file Returns a boolean representing whether or not it updated the record
351
28
16,959
def _sincedb_start_position(self):
    """Return the saved position for this file from the sincedb, or None."""
    if not self._sincedb_path:
        return None
    self._sincedb_init()
    self._log_debug('retrieving start_position from sincedb')
    conn = sqlite3.connect(self._sincedb_path, isolation_level=None)
    cursor = conn.cursor()
    cursor.execute('select position from sincedb where fid = :fid and filename = :filename', {
        'fid': self._fid,
        'filename': self._filename
    })
    start_position = None
    # At most one row matches (fid is the primary key).
    for row in cursor.fetchall():
        start_position, = row
    return start_position
Retrieves the starting position from the sincedb sql db for a given file
156
17
16,960
def _update_file(self, seek_to_end=True):
    """(Re)open the file for tailing, record its file id, optionally seek to EOF.

    Detects removal and rotation while reopening; a changed file id
    triggers a close so the watcher can pick the new file up cleanly.
    """
    try:
        self.close()
        self._file = self.open()
    except IOError:
        pass
    else:
        if not self._file:
            return
        self.active = True
        try:
            st = os.stat(self._filename)
        except EnvironmentError, err:  # Python 2 except syntax
            if err.errno == errno.ENOENT:
                self._log_info('file removed')
                self.close()
        fid = self.get_file_id(st)
        if not self._fid:
            self._fid = fid
        if fid != self._fid:
            self._log_info('file rotated')
            self.close()
        elif seek_to_end:
            self._seek_to_end()
Open the file for tailing
174
6
16,961
def tail(self, fname, encoding, window, position=None):
    """Read the last *window* lines from file *fname*.

    Tries the explicit *encoding* first, then each fallback in
    ENCODINGS, skipping candidates that raise UnicodeDecodeError.

    Returns:
        The tail lines on success, [] when the file has disappeared
        (ENOENT), or False when it could not be opened.

    Raises:
        ValueError: for a non-positive *window*.
    """
    if window <= 0:
        raise ValueError('invalid window %r' % window)
    encodings = ENCODINGS
    if encoding:
        encodings = [encoding] + ENCODINGS
    for enc in encodings:
        try:
            f = self.open(encoding=enc)
            if f:
                return self.tail_read(f, window, position=position)
            return False
        except IOError, err:  # Python 2 except syntax
            if err.errno == errno.ENOENT:
                return []
            raise
        except UnicodeDecodeError:
            pass
Read last N lines from file fname .
128
9
16,962
def create_transport(beaver_config, logger):
    """Create and return the transport object named in the config.

    Bare names (e.g. 'redis') map onto the bundled
    beaver.transports.<name>_transport module and its <Name>Transport
    class; dotted paths load a custom transport class.
    """
    transport_str = beaver_config.get('transport')
    if '.' not in transport_str:
        # allow simple names like 'redis' to load a beaver built-in transport
        module_path = 'beaver.transports.%s_transport' % transport_str.lower()
        class_name = '%sTransport' % transport_str.title()
    else:
        # allow dotted path names to load a custom transport class
        try:
            module_path, class_name = transport_str.rsplit('.', 1)
        except ValueError:
            raise Exception('Invalid transport {0}'.format(beaver_config.get('transport')))
    # NOTE: level=-1 (implicit relative import) is Python-2-only.
    _module = __import__(module_path, globals(), locals(), class_name, -1)
    transport_class = getattr(_module, class_name)
    transport = transport_class(beaver_config=beaver_config, logger=logger)
    return transport
Creates and returns a transport object
229
7
16,963
def update_files(self):
    """Refresh the set of watched files, throttled to the discover interval.

    Resolves configured globs (or scans the watch folder), stats each
    candidate, skips non-regular files, and starts tailing any file id
    not already watched.  This drives detection of new files, removals,
    rotation and truncation; on non-linux platforms files are manually
    reloaded because EOF is cached on BSD systems.
    """
    if self._update_time and int(time.time()) - self._update_time < self._discover_interval:
        return
    self._update_time = int(time.time())
    possible_files = []
    files = []
    if len(self._beaver_config.get('globs')) > 0:
        extend_files = files.extend
        for name, exclude in self._beaver_config.get('globs').items():
            globbed = [os.path.realpath(filename) for filename in eglob(name, exclude)]
            extend_files(globbed)
            self._beaver_config.addglob(name, globbed)
            self._callback(("addglob", (name, globbed)))
    else:
        append_files = files.append
        for name in self.listdir():
            append_files(os.path.realpath(os.path.join(self._folder, name)))
    for absname in files:
        try:
            st = os.stat(absname)
        except EnvironmentError, err:  # Python 2 except syntax
            if err.errno != errno.ENOENT:
                raise
        else:
            if not stat.S_ISREG(st.st_mode):
                continue
            append_possible_files = possible_files.append
            fid = self.get_file_id(st)
            append_possible_files((fid, absname))
    # add new ones
    new_files = [fname for fid, fname in possible_files if fid not in self._tails]
    self.watch(new_files)
Ensures all files are properly loaded . Detects new files , file removals , file rotation , and truncation . On non - linux platforms it will also manually reload the file for tailing . Note that this hack is necessary because EOF is cached on BSD systems .
366
55
16,964
def close(self, signalnum=None, frame=None):
    """Shut the worker down: close every Tail and stop consumer processes.

    The signature doubles as a signal handler; *signalnum* and *frame*
    are accepted but ignored.
    """
    self._running = False
    self._log_debug("Closing all tail objects")
    self._active = False
    for fid in self._tails:
        self._tails[fid].close()
    for n in range(0, self._number_of_consumer_processes):
        if self._proc[n] is not None and self._proc[n].is_alive():
            self._logger.debug("Terminate Process: " + str(n))
            self._proc[n].terminate()
            self._proc[n].join()
Closes all currently open Tail objects
150
7
16,965
def expand_paths(path):
    """Expand ant-style {a,b} bracket groups in *path*.

    Returns None for an empty path, [path] when there is nothing to
    expand, otherwise the list of every bracket permutation.
    """
    parts = MAGIC_BRACKETS.findall(path)
    if not path:
        return
    if not parts:
        return [path]
    choice_sets = [
        [(part[0], option, 1) for option in part[1].split(',')]
        for part in parts
    ]
    return [_replace_all(path, combo)
            for combo in itertools.product(*choice_sets)]
When given a path with brackets expands it to return all permutations of the path with expanded brackets similar to ant .
98
23
16,966
def multiline_merge(lines, current_event, re_after, re_before):
    """Merge incoming *lines* into multi-line events.

    A line matching *re_before*, or following a previous line that
    matches *re_after*, continues the event being built; any other line
    finishes the pending event and starts a new one.  Completed events
    are returned; *current_event* is mutated in place and keeps the
    (possibly partial) event still being assembled.
    """
    completed = []
    for line in lines:
        continues = bool(re_before and re_before.match(line)) or bool(
            re_after and current_event and re_after.match(current_event[-1]))
        if continues:
            current_event.append(line)
            continue
        if current_event:
            completed.append('\n'.join(current_event))
            current_event.clear()
        current_event.append(line)
    return completed
Merge multi - line events based on the configured multiline patterns .
127
8
16,967
def create_ssh_tunnel(beaver_config, logger=None):
    """Return a BeaverSshTunnel when the config requires one, else None.

    Fix: guard the log call — *logger* defaults to None and previously
    raised AttributeError whenever a tunnel was requested without a
    logger being supplied.
    """
    if not beaver_config.use_ssh_tunnel():
        return None
    if logger is not None:
        logger.info("Proxying transport using through local ssh tunnel")
    return BeaverSshTunnel(beaver_config, logger=logger)
Returns a BeaverSshTunnel object if the current config requires us to
69
16
16,968
def poll(self):
    """Poll the attached subprocess (if any), then sleep the configured interval."""
    proc = self._subprocess
    if proc is not None:
        proc.poll()
    time.sleep(self._beaver_config.get('subprocess_poll_sleep'))
Poll attached subprocess until it is available
49
8
16,969
def close(self):
    """Terminate the child subprocess's whole process group, if one is attached."""
    proc = self._subprocess
    if proc is None:
        return
    os.killpg(proc.pid, signal.SIGTERM)
    self._subprocess = None
Close child subprocess
42
4
16,970
def _to_unicode(self, data, encoding, errors='strict'):
    """Decode *data* to unicode, honoring any byte order mark.

    A recognized BOM both overrides *encoding* (UTF-16/32 LE/BE or
    UTF-8) and is stripped before decoding; otherwise the supplied
    encoding is used as-is.  *errors* is passed through to the decoder.
    """
    # strip Byte Order Mark (if present)
    if (len(data) >= 4) and (data[:2] == '\xfe\xff') and (data[2:4] != '\x00\x00'):
        encoding = 'utf-16be'
        data = data[2:]
    elif (len(data) >= 4) and (data[:2] == '\xff\xfe') and (data[2:4] != '\x00\x00'):
        encoding = 'utf-16le'
        data = data[2:]
    elif data[:3] == '\xef\xbb\xbf':
        encoding = 'utf-8'
        data = data[3:]
    elif data[:4] == '\x00\x00\xfe\xff':
        encoding = 'utf-32be'
        data = data[4:]
    elif data[:4] == '\xff\xfe\x00\x00':
        encoding = 'utf-32le'
        data = data[4:]
    newdata = unicode(data, encoding, errors)  # Python 2 builtin
    return newdata
Given a string and its encoding decodes the string into Unicode . %encoding is a string recognized by encodings . aliases
272
26
16,971
def reconnect(self):
    """Re-establish the connection after a handled TransportException.

    Best-effort closes the old connection (logging any failure), then
    creates a fresh one.  Always returns True.
    """
    try:
        self.conn.close()
    except Exception, e:  # Python 2 except syntax
        self.logger.warn(e)
    self.createConnection()
    return True
Allows reconnection from when a handled TransportException is thrown
36
11
16,972
def _check_connections ( self ) : for server in self . _servers : if self . _is_reachable ( server ) : server [ 'down_until' ] = 0 else : server [ 'down_until' ] = time . time ( ) + 5
Checks if all configured redis servers are reachable
59
11
16,973
def _is_reachable ( self , server ) : try : server [ 'redis' ] . ping ( ) return True except UserWarning : self . _logger . warn ( 'Cannot reach redis server: ' + server [ 'url' ] ) except Exception : self . _logger . warn ( 'Cannot reach redis server: ' + server [ 'url' ] ) return False
Checks if the given redis server is reachable
86
11
16,974
def invalidate(self):
    """Invalidate the transport and drop all pooled redis connections.

    Always returns False (the transport is no longer valid).
    """
    super(RedisTransport, self).invalidate()
    for server in self._servers:
        server['redis'].connection_pool.disconnect()
    return False
Invalidates the current transport and disconnects all redis connections
47
12
16,975
def callback(self, filename, lines, **kwargs):
    """Send formatted log *lines* to the next available redis server.

    Each line is pushed to every configured namespace (per-file
    override or the instance default), via RPUSH for the list
    data-type or PUBLISH for the channel data-type, pipelined without
    transactions.

    Raises:
        TransportException: when the pipeline execution fails.
    """
    self._logger.debug('Redis transport called')
    timestamp = self.get_timestamp(**kwargs)
    if kwargs.get('timestamp', False):
        del kwargs['timestamp']
    namespaces = self._beaver_config.get_field('redis_namespace', filename)
    if not namespaces:
        namespaces = self._namespace
    namespaces = namespaces.split(",")
    # NOTE(review): str.join uses the message as the *separator* here —
    # likely meant 'Got namespaces: ' + ','.join(namespaces); confirm.
    self._logger.debug('Got namespaces: '.join(namespaces))
    data_type = self._data_type
    self._logger.debug('Got data type: ' + data_type)
    server = self._get_next_server()
    self._logger.debug('Got redis server: ' + server['url'])
    pipeline = server['redis'].pipeline(transaction=False)
    callback_map = {
        self.LIST_DATA_TYPE: pipeline.rpush,
        self.CHANNEL_DATA_TYPE: pipeline.publish,
    }
    callback_method = callback_map[data_type]
    for line in lines:
        for namespace in namespaces:
            callback_method(namespace.strip(), self.format(filename, line, timestamp, **kwargs))
    try:
        pipeline.execute()
    except redis.exceptions.RedisError, exception:  # Python 2 except syntax
        self._logger.warn('Cannot push lines to redis server: ' + server['url'])
        raise TransportException(exception)
Sends log lines to redis servers
356
8
16,976
def _get_next_server(self):
    """Return the next usable redis server, round-robin with backoff.

    Servers marked down are re-probed once their 5-second bench
    expires; a failed probe benches them again.

    Raises:
        TransportException: when no server is reachable after one
            full rotation.
    """
    current_try = 0
    max_tries = len(self._servers)
    while current_try < max_tries:
        server_index = self._raise_server_index()
        server = self._servers[server_index]
        down_until = server['down_until']
        self._logger.debug('Checking server ' + str(current_try + 1) + '/' + str(max_tries) + ': ' + server['url'])
        if down_until == 0:
            self._logger.debug('Elected server: ' + server['url'])
            return server
        if down_until < time.time():
            if self._is_reachable(server):
                server['down_until'] = 0
                self._logger.debug('Elected server: ' + server['url'])
                return server
            else:
                self._logger.debug('Server still unavailable: ' + server['url'])
                server['down_until'] = time.time() + 5
        current_try += 1
    raise TransportException('Cannot reach any redis server')
Returns a valid redis server or raises a TransportException
269
11
16,977
def valid(self):
    """Return True when at least one redis server's bench time has expired."""
    now = time.time()
    return any(server['down_until'] <= now for server in self._servers)
Returns whether or not the transport can send data to any redis server
50
14
16,978
def format(self, filename, line, timestamp, **kwargs):
    """Build the logstash-style payload for one log line and format it.

    Picks the per-file formatter (falling back to the default), fills
    the configured field names, and emits either the logstash v0
    layout (@source/@fields) or v1+ (@version plus flattened fields).
    """
    line = unicode(line.encode("utf-8"), "utf-8", errors="ignore")  # py2: normalize to unicode
    formatter = self._beaver_config.get_field('format', filename)
    if formatter not in self._formatters:
        formatter = self._default_formatter
    data = {
        self._fields.get('type'): kwargs.get('type'),
        self._fields.get('tags'): kwargs.get('tags'),
        '@timestamp': timestamp,
        self._fields.get('host'): self._current_host,
        self._fields.get('file'): filename,
        self._fields.get('message'): line
    }
    if self._logstash_version == 0:
        data['@source'] = 'file://{0}'.format(filename)
        data['@fields'] = kwargs.get('fields')
    else:
        data['@version'] = self._logstash_version
        fields = kwargs.get('fields')
        for key in fields:
            data[key] = fields.get(key)
    return self._formatters[formatter](data)
Returns a formatted log line
301
5
16,979
def get_timestamp(self, **kwargs):
    """Return kwargs['timestamp'] if truthy, else the current UTC time.

    Generated timestamps are ISO-8601 with millisecond precision and a
    trailing 'Z' (e.g. 2020-01-01T12:00:00.123Z).
    """
    supplied = kwargs.get('timestamp')
    if supplied:
        return supplied
    now = datetime.datetime.utcnow()
    millis = ".%03d" % (now.microsecond / 1000)
    return now.strftime("%Y-%m-%dT%H:%M:%S") + millis + "Z"
Retrieves the timestamp for a given set of data
92
11
16,980
def _make_executable ( path ) : os . chmod ( path , os . stat ( path ) . st_mode | stat . S_IXUSR | stat . S_IXGRP | stat . S_IXOTH )
Make the file at path executable .
51
7
16,981
def build_parser():
    """Construct the command-line argument parser for GCT(x) subsetting."""
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    add = parser.add_argument
    # Required args
    add("--in_path", "-i", required=True,
        help="file path to input GCT(x) file")
    # Row/column inclusion lists
    add("--rid", nargs="+",
        help="filepath to grp file or string array for including rows")
    add("--cid", nargs="+",
        help="filepath to grp file or string array for including cols")
    # Row/column exclusion lists
    add("--exclude_rid", "-er", nargs="+",
        help="filepath to grp file or string array for excluding rows")
    add("--exclude_cid", "-ec", nargs="+",
        help="filepath to grp file or string array for excluding cols")
    # Output options
    add("--out_name", "-o", default="ds_subsetted.gct",
        help="what to name the output file")
    add("--out_type", default="gct", choices=["gct", "gctx"],
        help="whether to write output as GCT or GCTx")
    add("--verbose", "-v", action="store_true", default=False,
        help="whether to increase the # of messages reported")
    return parser
Build argument parser .
363
4
16,982
def _read_arg ( arg ) : # If arg is None, just return it back if arg is None : arg_out = arg else : # If len(arg) == 1 and arg[0] is a valid filepath, read it as a grp file if len ( arg ) == 1 and os . path . exists ( arg [ 0 ] ) : arg_out = grp . read ( arg [ 0 ] ) else : arg_out = arg # Make sure that arg_out is a list of strings assert isinstance ( arg_out , list ) , "arg_out must be a list." assert type ( arg_out [ 0 ] ) == str , "arg_out must be a list of strings." return arg_out
If arg is a list with 1 element that corresponds to a valid file path use set_io . grp to read the grp file . Otherwise check that arg is a list of strings .
158
39
16,983
def read(file_path):
    """Read a .gmt file into a list of set dicts.

    Each line must carry >= 3 tab-separated fields: set id, set
    description, then the set members (empty members are dropped,
    duplicates within a set are rejected).  The assembled gmt is
    integrity-checked (unique set ids) before being returned.
    """
    # Read in file
    actual_file_path = os.path.expanduser(file_path)
    with open(actual_file_path, 'r') as f:
        lines = f.readlines()
    # Create GMT object
    gmt = []
    # Iterate over each line
    for line_num, line in enumerate(lines):
        # Separate along tabs
        fields = line.split('\t')
        assert len(fields) > 2, (
            "Each line must have at least 3 tab-delimited items. " +
            "line_num: {}, fields: {}").format(line_num, fields)
        # Get rid of trailing whitespace
        fields[-1] = fields[-1].rstrip()
        # Collect entries
        entries = fields[2:]
        # Remove empty entries
        entries = [x for x in entries if x]
        assert len(set(entries)) == len(entries), (
            "There should not be duplicate entries for the same set. " +
            "line_num: {}, entries: {}").format(line_num, entries)
        # Store this line as a dictionary
        line_dict = {SET_IDENTIFIER_FIELD: fields[0],
                     SET_DESC_FIELD: fields[1],
                     SET_MEMBERS_FIELD: entries}
        gmt.append(line_dict)
    verify_gmt_integrity(gmt)
    return gmt
Read a gmt file at the path specified by file_path .
313
14
16,984
def verify_gmt_integrity(gmt):
    """Assert that every set identifier in *gmt* is unique."""
    # Verify that set ids are unique
    set_ids = [record[SET_IDENTIFIER_FIELD] for record in gmt]
    unique_count = len(set(set_ids))
    assert unique_count == len(set_ids), (
        "Set identifiers should be unique. set_ids: {}".format(set_ids))
Make sure that set ids are unique .
82
9
16,985
def write(gmt, out_path):
    """Write *gmt* (a list of set dicts) to *out_path* as tab-delimited GMT text."""
    with open(out_path, 'w') as f:
        for each_dict in gmt:
            members = '\t'.join([str(entry) for entry in each_dict[SET_MEMBERS_FIELD]])
            # id, description and members joined by tabs — identical byte
            # layout to writing each piece separately.
            f.write(each_dict[SET_IDENTIFIER_FIELD] + '\t' +
                    each_dict[SET_DESC_FIELD] + '\t' +
                    members + '\n')
Write a GMT to a text file .
126
8
16,986
def parse(gctx_file_path, convert_neg_666=True, rid=None, cid=None, ridx=None, cidx=None,
          row_meta_only=False, col_meta_only=False, make_multiindex=False):
    """Read a .gctx file and parse it into a GCToo object.

    Args:
        gctx_file_path: path to the .gctx file ('~' is expanded).
        convert_neg_666: whether to convert -666 placeholder values
            while parsing metadata.
        rid / cid: ids of rows / columns to keep.
        ridx / cidx: integer indexes of rows / columns to keep
            (mutually exclusive with rid / cid).
        row_meta_only / col_meta_only: return only that metadata
            DataFrame instead of a full GCToo.
        make_multiindex: have GCToo also build a multi-index df.

    Returns:
        A GCToo instance, or a single metadata DataFrame when one of
        the *_meta_only flags is set.

    Raises:
        Exception: when the file does not exist.
    """
    full_path = os.path.expanduser(gctx_file_path)
    # Verify that the path exists
    if not os.path.exists(full_path):
        err_msg = "The given path to the gctx file cannot be found. full_path: {}"
        logger.error(err_msg.format(full_path))
        raise Exception(err_msg.format(full_path))
    logger.info("Reading GCTX: {}".format(full_path))
    # open file
    gctx_file = h5py.File(full_path, "r")
    if row_meta_only:
        # read in row metadata
        row_dset = gctx_file[row_meta_group_node]
        row_meta = parse_metadata_df("row", row_dset, convert_neg_666)
        # validate optional input ids & get indexes to subset by
        (sorted_ridx, sorted_cidx) = check_and_order_id_inputs(rid, ridx, cid, cidx, row_meta, None)
        gctx_file.close()
        # subset if specified, then return
        row_meta = row_meta.iloc[sorted_ridx]
        return row_meta
    elif col_meta_only:
        # read in col metadata
        col_dset = gctx_file[col_meta_group_node]
        col_meta = parse_metadata_df("col", col_dset, convert_neg_666)
        # validate optional input ids & get indexes to subset by
        (sorted_ridx, sorted_cidx) = check_and_order_id_inputs(rid, ridx, cid, cidx, None, col_meta)
        gctx_file.close()
        # subset if specified, then return
        col_meta = col_meta.iloc[sorted_cidx]
        return col_meta
    else:
        # read in row metadata
        row_dset = gctx_file[row_meta_group_node]
        row_meta = parse_metadata_df("row", row_dset, convert_neg_666)
        # read in col metadata
        col_dset = gctx_file[col_meta_group_node]
        col_meta = parse_metadata_df("col", col_dset, convert_neg_666)
        # validate optional input ids & get indexes to subset by
        (sorted_ridx, sorted_cidx) = check_and_order_id_inputs(rid, ridx, cid, cidx, row_meta, col_meta)
        data_dset = gctx_file[data_node]
        data_df = parse_data_df(data_dset, sorted_ridx, sorted_cidx, row_meta, col_meta)
        # (if subsetting) subset metadata
        row_meta = row_meta.iloc[sorted_ridx]
        col_meta = col_meta.iloc[sorted_cidx]
        # get version (may be stored as a one-element ndarray)
        my_version = gctx_file.attrs[version_node]
        if type(my_version) == np.ndarray:
            my_version = my_version[0]
        gctx_file.close()
        # make GCToo instance
        my_gctoo = GCToo.GCToo(data_df=data_df, row_metadata_df=row_meta,
                               col_metadata_df=col_meta, src=full_path,
                               version=my_version, make_multiindex=make_multiindex)
        return my_gctoo
Primary method of script . Reads in path to a gctx file and parses into GCToo object .
853
23
16,987
def check_id_idx_exclusivity(id, idx):
    """Ensure the caller did not supply both id and idx subsetting values.

    Returns a (mode, values) pair: ("id", id), ("idx", idx), or
    (None, []) when neither was given.

    Raises:
        Exception: when both id and idx are provided.
    """
    if id is not None and idx is not None:
        msg = ("'id' and 'idx' fields can't both not be None," +
               " please specify subset in only one of these fields")
        logger.error(msg)
        raise Exception("parse_gctx.check_id_idx_exclusivity: " + msg)
    if id is not None:
        return ("id", id)
    if idx is not None:
        return ("idx", idx)
    return (None, [])
Makes sure the user didn't provide both ids and idx values to subset by .
136
18
16,988
def parse_data_df(data_dset, ridx, cidx, row_meta, col_meta):
    """Build the data DataFrame from the hdf5 dataset, subsetting if asked.

    The stored matrix is (cols x rows) — hence the final transpose.
    When subsetting, whichever of ridx/cidx is smaller is applied first
    to keep the intermediate read small.
    """
    wants_all_rows = len(ridx) == len(row_meta.index)
    wants_all_cols = len(cidx) == len(col_meta.index)
    if wants_all_rows and wants_all_cols:
        # no subset: bulk-read the whole dataset
        data_array = np.empty(data_dset.shape, dtype=np.float32)
        data_dset.read_direct(data_array)
        data_array = data_array.transpose()
    elif len(ridx) <= len(cidx):
        data_array = data_dset[:, ridx].astype(np.float32)[cidx, :].transpose()
    else:
        data_array = data_dset[cidx, :].astype(np.float32)[:, ridx].transpose()
    # make DataFrame instance
    return pd.DataFrame(data_array,
                        index=row_meta.index[ridx],
                        columns=col_meta.index[cidx])
Parses in data_df from hdf5 subsetting if specified .
277
16
16,989
def get_column_metadata(gctx_file_path, convert_neg_666=True):
    """Open a .gctx file and return only its column metadata DataFrame."""
    full_path = os.path.expanduser(gctx_file_path)
    # open file
    gctx_file = h5py.File(full_path, "r")
    col_meta = parse_metadata_df(
        "col", gctx_file[col_meta_group_node], convert_neg_666)
    gctx_file.close()
    return col_meta
Opens . gctx file and returns only column metadata
118
11
16,990
def get_row_metadata(gctx_file_path, convert_neg_666=True):
    """Open a .gctx file and return only its row metadata DataFrame."""
    full_path = os.path.expanduser(gctx_file_path)
    # open file
    gctx_file = h5py.File(full_path, "r")
    row_meta = parse_metadata_df(
        "row", gctx_file[row_meta_group_node], convert_neg_666)
    gctx_file.close()
    return row_meta
Opens . gctx file and returns only row metadata
118
11
16,991
def multi_index_df_to_component_dfs(multi_index_df, rid="rid", cid="cid"):
    """Convert a multi-index df into its 3 component dfs.

    Args:
        multi_index_df (pandas DataFrame): df whose index/columns may be MultiIndexes
        rid (str): name of the index level holding row ids
        cid (str): name of the column level holding column ids

    Returns:
        tuple of 3 pandas DataFrames: (data_df, row_metadata_df, col_metadata_df)
    """
    # Id level of the multiindex will become the index
    rids = list(multi_index_df.index.get_level_values(rid))
    cids = list(multi_index_df.columns.get_level_values(cid))

    # It's possible that the index and/or columns of multi_index_df are not
    # actually multi-index; need to check for this and there are more than one level in index(python3)
    if isinstance(multi_index_df.index, pd.MultiIndex):

        # check if there are more than one levels in index (python3)
        if len(multi_index_df.index.names) > 1:

            # If so, drop rid because it won't go into the body of the metadata
            mi_df_index = multi_index_df.index.droplevel(rid)

            # Names of the multiindex levels become the headers
            rhds = list(mi_df_index.names)

            # Assemble metadata values (transpose so rows correspond to rids)
            row_metadata = np.array([mi_df_index.get_level_values(level).values for level in list(rhds)]).T

        # if there is one level in index (python3), then rhds and row metadata should be empty
        else:
            rhds = []
            row_metadata = []

    # If the index is not multi-index, then rhds and row metadata should be empty
    else:
        rhds = []
        row_metadata = []

    # Check if columns of multi_index_df are in fact multi-index
    if isinstance(multi_index_df.columns, pd.MultiIndex):

        # Check if there are more than one levels in columns(python3)
        if len(multi_index_df.columns.names) > 1:

            # If so, drop cid because it won't go into the body of the metadata
            mi_df_columns = multi_index_df.columns.droplevel(cid)

            # Names of the multiindex levels become the headers
            chds = list(mi_df_columns.names)

            # Assemble metadata values (transpose so rows correspond to cids)
            col_metadata = np.array([mi_df_columns.get_level_values(level).values for level in list(chds)]).T

        # If there is one level in columns (python3), then chds and col metadata should be empty
        else:
            chds = []
            col_metadata = []

    # If the columns are not multi-index, then chds and col metadata should be empty
    else:
        chds = []
        col_metadata = []

    # Create component dfs: metadata bodies framed by the extracted ids/headers
    row_metadata_df = pd.DataFrame.from_records(row_metadata, index=pd.Index(rids, name="rid"), columns=pd.Index(rhds, name="rhd"))
    col_metadata_df = pd.DataFrame.from_records(col_metadata, index=pd.Index(cids, name="cid"), columns=pd.Index(chds, name="chd"))
    data_df = pd.DataFrame(multi_index_df.values, index=pd.Index(rids, name="rid"), columns=pd.Index(cids, name="cid"))

    return data_df, row_metadata_df, col_metadata_df
Convert a multi - index df into 3 component dfs .
778
13
16,992
def check_df(self, df):
    """Verify that df is a pandas DataFrame and that its index and column
    values are unique.

    Args:
        df: object expected to be a pandas DataFrame

    Returns:
        bool: True if df is a DataFrame with unique index and column values

    Raises:
        Exception: if df is not a DataFrame, or its index or columns contain
            duplicate values.
    """
    if isinstance(df, pd.DataFrame):
        if not df.index.is_unique:
            repeats = df.index[df.index.duplicated()].values
            msg = "Index values must be unique but aren't. The following entries appear more than once: {}".format(repeats)
            self.logger.error(msg)
            raise Exception("GCToo GCToo.check_df " + msg)
        if not df.columns.is_unique:
            repeats = df.columns[df.columns.duplicated()].values
            msg = "Columns values must be unique but aren't. The following entries appear more than once: {}".format(repeats)
            # Fix: log before raising, consistent with the duplicate-index
            # branch above (the original raised without logging here)
            self.logger.error(msg)
            raise Exception("GCToo GCToo.check_df " + msg)
        else:
            return True
    else:
        msg = "expected Pandas DataFrame, got something else: {} of type: {}".format(df, type(df))
        self.logger.error(msg)
        raise Exception("GCToo GCToo.check_df " + msg)
Verifies that df is a pandas DataFrame instance and that its index and column values are unique .
237
21
16,993
def are_genes_in_api(my_clue_api_client, gene_symbols):
    """Determine which of the provided gene symbols are present in the API.

    Args:
        my_clue_api_client: client object exposing run_filter_query
        gene_symbols: iterable of gene symbol strings to look up

    Returns:
        set: the subset of gene_symbols found in the API (empty set when the
            input is empty, in which case no query is run).
    """
    # Guard clause: nothing to look up, so skip the round trip entirely
    if len(gene_symbols) == 0:
        logger.warning("provided gene_symbols was empty, cannot run query")
        return set()

    # The query filter needs a real list; coerce other iterables
    symbols = gene_symbols if type(gene_symbols) is list else list(gene_symbols)
    query = {"where": {"gene_symbol": {"inq": symbols}},
             "fields": {"gene_symbol": True}}
    query_result = my_clue_api_client.run_filter_query(resource_name, query)
    logger.debug("query_result: {}".format(query_result))

    return {entry["gene_symbol"] for entry in query_result}
determine if genes are present in the API
212
10
16,994
def write(gctoo, out_fname, data_null="NaN", metadata_null="-666", filler_null="-666", data_float_format="%.4f"):
    """Write a gctoo object to a gct file.

    Args:
        gctoo: GCToo instance holding data_df, row_metadata_df, col_metadata_df
        out_fname (str): output path; ".gct" is appended if missing
        data_null (str): string used for missing data values
        metadata_null (str): string used for missing metadata values
        filler_null (str): string used for the empty top-left filler cells
        data_float_format (str): printf-style format for data values

    Returns:
        None (writes the file as a side effect and logs its location)
    """
    # Ensure the output path carries the .gct extension
    if not out_fname.endswith(".gct"):
        out_fname += ".gct"

    # Context manager guarantees the handle is closed even if one of the
    # write helpers raises (the original open/close version leaked it)
    with open(out_fname, "w") as f:
        # First two lines: version tag and matrix/metadata dimensions
        dims = [str(gctoo.data_df.shape[0]), str(gctoo.data_df.shape[1]),
                str(gctoo.row_metadata_df.shape[1]), str(gctoo.col_metadata_df.shape[1])]
        write_version_and_dims(VERSION, dims, f)

        # Write top half of the gct (column metadata over the data block)
        write_top_half(f, gctoo.row_metadata_df, gctoo.col_metadata_df, metadata_null, filler_null)

        # Write bottom half of the gct (row metadata beside the data block)
        write_bottom_half(f, gctoo.row_metadata_df, gctoo.data_df, data_null, data_float_format, metadata_null)

    logger.info("GCT has been written to {}".format(out_fname))
Write a gctoo object to a gct file .
308
12
16,995
def write_version_and_dims(version, dims, f):
    """Write the first two lines of a gct file: the "#<version>" tag line and
    the tab-separated dimensions line.

    Args:
        version (str): gct version string (written after a leading "#")
        dims (list of str): the four dimension fields, already stringified
        f (file handle): open writable text stream
    """
    f.write("#{}\n".format(version))
    f.write("{}\t{}\t{}\t{}\n".format(dims[0], dims[1], dims[2], dims[3]))
Write first two lines of gct file .
79
9
16,996
def append_dims_and_file_extension(fname, data_df):
    """Append dimensions and the .gct extension to an output filename.

    N.B. dimensions are written cols x rows.

    Args:
        fname (str): desired output filename, with or without ".gct"
        data_df (pandas DataFrame): data whose shape supplies the dimensions

    Returns:
        str: "<stem>_n<cols>x<rows>.gct"
    """
    n_rows, n_cols = data_df.shape
    # Strip an existing .gct suffix so the dims slot in before the extension
    stem = os.path.splitext(fname)[0] if fname.endswith(".gct") else fname
    return '{0}_n{1}x{2}.gct'.format(stem, n_cols, n_rows)
Append dimensions and file extension to output filename . N . B . Dimensions are cols x rows .
187
21
16,997
def robust_zscore(mat, ctrl_mat=None, min_mad=0.1):
    """Robustly z-score a pandas df along the rows using medians and MADs.

    Args:
        mat (pandas DataFrame): data to z-score, samples in columns
        ctrl_mat (pandas DataFrame): optional control data from which medians
            and MADs are computed; defaults to computing them from mat itself
        min_mad (float): floor applied to the MADs to avoid division blow-ups

    Returns:
        pandas DataFrame: rounded robust z-scores, same shape as mat
    """
    # Medians and absolute deviations come from the control df when provided,
    # otherwise from the plate itself
    reference = mat if ctrl_mat is None else ctrl_mat
    medians = reference.median(axis=1)
    median_devs = abs(reference.subtract(medians, axis=0))

    centered = mat.subtract(medians, axis='index')

    # Threshold the MADs from below
    mads = median_devs.median(axis=1).clip(lower=min_mad)

    # Must multiply values by 1.4826 to make MAD comparable to SD
    # (https://en.wikipedia.org/wiki/Median_absolute_deviation)
    zscore_df = centered.divide(mads * 1.4826, axis='index')

    return zscore_df.round(rounding_precision)
Robustly z - score a pandas df along the rows .
241
14
16,998
def parse(file_path, convert_neg_666=True, rid=None, cid=None, ridx=None, cidx=None,
          row_meta_only=False, col_meta_only=False, make_multiindex=False):
    """Identify whether file_path is a .gct or .gctx file and delegate to the
    corresponding parser with identical arguments.

    Args:
        file_path (str): path ending in ".gct" or ".gctx"
        (remaining args are forwarded unchanged to the underlying parser)

    Returns:
        whatever the underlying parser returns (GCToo or metadata df)

    Raises:
        Exception: if the extension is neither .gct nor .gctx.
    """
    # Select the parser by extension; .gctx is checked via elif so a plain
    # .gct match wins first, exactly as before
    if file_path.endswith(".gct"):
        parser = parse_gct.parse
    elif file_path.endswith(".gctx"):
        parser = parse_gctx.parse
    else:
        err_msg = "File to parse must be .gct or .gctx!"
        logger.error(err_msg)
        raise Exception(err_msg)

    return parser(file_path, convert_neg_666=convert_neg_666, rid=rid, cid=cid,
                  ridx=ridx, cidx=cidx, row_meta_only=row_meta_only,
                  col_meta_only=col_meta_only, make_multiindex=make_multiindex)
Identifies whether file_path corresponds to a . gct or . gctx file and calls the correct corresponding parse method .
293
25
16,999
def get_upper_triangle(correlation_matrix):
    """Extract the upper triangle (above the diagonal) of a square correlation
    matrix as a long-form df. Negative correlations are clipped to 0.

    Args:
        correlation_matrix (pandas DataFrame): square correlation matrix

    Returns:
        pandas DataFrame: columns ['cid', 'rid', 'corr'], one row per
            upper-triangle entry, values rounded to rounding_precision
    """
    # Fix: np.bool was a deprecated alias removed in NumPy 1.24; the builtin
    # bool is the correct dtype and behaves identically here
    mask = np.triu(np.ones(correlation_matrix.shape), k=1).astype(bool)
    upper_triangle = correlation_matrix.where(mask)

    # convert matrix into long form description
    upper_tri_df = upper_triangle.stack().reset_index(level=1)
    upper_tri_df.columns = ['rid', 'corr']

    # Index at this point is cid, it now becomes a column
    upper_tri_df.reset_index(level=0, inplace=True)

    # Get rid of negative values
    upper_tri_df['corr'] = upper_tri_df['corr'].clip(lower=0)

    return upper_tri_df.round(rounding_precision)
Extract upper triangle from a square matrix . Negative values are set to 0 .
187
16