query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Given a dictionary with keys = ids and values = strings, generates an XLIFF file to send to Unbabel.
def generate_xliff(entry_dict):
    """Generate an XLIFF document from a dict of translation entries.

    Given a dictionary with keys = ids and values = strings, builds the
    XLIFF file content to send to Unbabel.

    :param entry_dict: mapping of translation-unit id -> source text
    :return: the full XLIFF document as a single string
    """
    # dict.iteritems() only exists on Python 2; items() works on both.
    # join() avoids the quadratic cost of repeated `+=` on a string.
    entries = "".join(
        create_trans_unit(key, value).strip() + "\n"
        for key, value in entry_dict.items()
    )
    return get_head_xliff().strip() + "\n" + entries + get_tail_xliff().strip()
6,600
https://github.com/Unbabel/unbabel-py/blob/3bd6397174e184d89d2a11149d87be5d12570c64/unbabel/xliff_converter.py#L5-L34
[ "def", "hook", "(", "self", ",", "m", ":", "nn", ".", "Module", ",", "i", ":", "Tensors", ",", "o", ":", "Tensors", ")", "->", "Tuple", "[", "Rank0Tensor", ",", "Rank0Tensor", "]", ":", "return", "o", ".", "mean", "(", ")", ".", "item", "(", ")", ",", "o", ".", "std", "(", ")", ".", "item", "(", ")" ]
Get alert by providing name ID or other unique key .
def Get(self, key):
    """Get alert by providing name, ID, or other unique key.

    Returns the first alert whose id or name equals *key*;
    returns None (implicitly) when nothing matches.
    """
    for candidate in self.alerts:
        if candidate.id == key or candidate.name == key:
            return candidate
6,601
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/alert.py#L30-L39
[ "def", "apply", "(", "self", ")", ":", "self", ".", "read_group_info", "(", ")", "if", "self", ".", "tabs", ".", "count", "(", ")", "==", "0", ":", "# disactivate buttons", "self", ".", "button_color", ".", "setEnabled", "(", "False", ")", "self", ".", "button_del", ".", "setEnabled", "(", "False", ")", "self", ".", "button_apply", ".", "setEnabled", "(", "False", ")", "else", ":", "# activate buttons", "self", ".", "button_color", ".", "setEnabled", "(", "True", ")", "self", ".", "button_del", ".", "setEnabled", "(", "True", ")", "self", ".", "button_apply", ".", "setEnabled", "(", "True", ")", "if", "self", ".", "groups", ":", "self", ".", "parent", ".", "overview", ".", "update_position", "(", ")", "self", ".", "parent", ".", "spectrum", ".", "update", "(", ")", "self", ".", "parent", ".", "notes", ".", "enable_events", "(", ")", "else", ":", "self", ".", "parent", ".", "traces", ".", "reset", "(", ")", "self", ".", "parent", ".", "spectrum", ".", "reset", "(", ")", "self", ".", "parent", ".", "notes", ".", "enable_events", "(", ")" ]
Search alert list by providing partial name ID or other key .
def Search(self, key):
    """Search alert list by providing partial name, ID, or other key.

    Case-insensitive substring match against each alert's id and name.
    Returns a list of all matching alerts (possibly empty).
    """
    needle = key.lower()
    matches = []
    for candidate in self.alerts:
        if needle in candidate.id.lower() or needle in candidate.name.lower():
            matches.append(candidate)
    return matches
6,602
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/alert.py#L42-L52
[ "def", "get_requests_session", "(", ")", ":", "session", "=", "requests", ".", "sessions", ".", "Session", "(", ")", "session", ".", "mount", "(", "'http://'", ",", "HTTPAdapter", "(", "pool_connections", "=", "25", ",", "pool_maxsize", "=", "25", ",", "pool_block", "=", "True", ")", ")", "session", ".", "mount", "(", "'https://'", ",", "HTTPAdapter", "(", "pool_connections", "=", "25", ",", "pool_maxsize", "=", "25", ",", "pool_block", "=", "True", ")", ")", "return", "session" ]
Login to retrieve bearer token and set default account and location aliases.
def _Login():
    """Login to retrieve bearer token and set default account and location aliases.

    Reads credentials from clc.v2.V2_API_USERNAME / V2_API_PASSWD, POSTs to the
    v2 authentication endpoint, and on success stores the bearer token plus the
    account/location aliases on the module-level ``clc`` object.

    Raises:
        clc.APIV2NotEnabled: if username/password are not configured.
        Exception: on a 400 (invalid login) or any other non-200 response.
    """
    if not clc.v2.V2_API_USERNAME or not clc.v2.V2_API_PASSWD:
        clc.v1.output.Status('ERROR', 3, 'V2 API username and password not provided')
        raise(clc.APIV2NotEnabled)
    # Reuse the shared module-level requests session.
    session = clc._REQUESTS_SESSION
    session.headers['content-type'] = "application/json"
    r = session.request("POST",
                        "%s/v2/%s" % (clc.defaults.ENDPOINT_URL_V2, "authentication/login"),
                        json={"username": clc.v2.V2_API_USERNAME, "password": clc.v2.V2_API_PASSWD},
                        verify=API._ResourcePath('clc/cacert.pem'))
    if r.status_code == 200:
        # Cache login state for subsequent Call() invocations.
        clc._LOGIN_TOKEN_V2 = r.json()['bearerToken']
        clc.ALIAS = r.json()['accountAlias']
        clc.LOCATION = r.json()['locationAlias']
    elif r.status_code == 400:
        raise(Exception("Invalid V2 API login. %s" % (r.json()['message'])))
    else:
        raise(Exception("Error logging into V2 API. Response code %s. message %s" % (r.status_code, r.json()['message'])))
6,603
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/api.py#L62-L83
[ "def", "probe_project", "(", "python_module", ")", ":", "log", ".", "info", "(", "'Checking project for changes requirements.'", ")", "return", "(", "has_tools", "(", ")", "and", "has_setup", "(", ")", "and", "has_metadata", "(", "python_module", ")", "and", "has_test_runner", "(", ")", "and", "has_readme", "(", ")", "and", "has_changelog", "(", ")", ")" ]
Execute v2 API call .
def Call(method, url, payload=None, session=None, debug=False):
    """Execute v2 API call.

    :param method: HTTP verb ("GET", "POST", ...).
    :param url: ref relative to the v2 endpoint, or an absolute ref starting with '/'.
    :param payload: dict or string body; for GET it is sent as query params.
    :param session: optional dict with 'token' and 'http_session' keys to use
                    instead of the cached module-level login state.
    :param debug: when True, dump the prepared request and the response.
    :return: parsed JSON response (dict); {} when the body is not JSON.
    :raises clc.APIFailedResponse: for any non-2xx response.
    """
    if session is not None:
        token = session['token']
        http_session = session['http_session']
    else:
        # Lazily login with module-level credentials on first use.
        if not clc._LOGIN_TOKEN_V2:
            API._Login()
        token = clc._LOGIN_TOKEN_V2
        http_session = clc._REQUESTS_SESSION
    if payload is None:
        payload = {}
    # If executing refs provided in API they are abs paths,
    # Else refs we build in the sdk are relative
    if url[0] == '/':
        fq_url = "%s%s" % (clc.defaults.ENDPOINT_URL_V2, url)
    else:
        fq_url = "%s/v2/%s" % (clc.defaults.ENDPOINT_URL_V2, url)
    http_session.headers.update({'Authorization': "Bearer %s" % token})
    # NOTE(review): `basestring` is Python 2 only — this module appears to target py2.
    if isinstance(payload, basestring):
        http_session.headers['content-type'] = "Application/json"  # added for server ops with str payload
    else:
        http_session.headers['content-type'] = "application/x-www-form-urlencoded"
    if method == "GET":
        r = http_session.request(method, fq_url,
                                 params=payload,
                                 verify=API._ResourcePath('clc/cacert.pem'))
    else:
        r = http_session.request(method, fq_url,
                                 data=payload,
                                 verify=API._ResourcePath('clc/cacert.pem'))
    if debug:
        API._DebugRequest(request=requests.Request(method, fq_url, data=payload, headers=http_session.headers).prepare(),
                          response=r)
    if r.status_code >= 200 and r.status_code < 300:
        try:
            return(r.json())
        except:
            # 2xx with an empty / non-JSON body is treated as success.
            return({})
    else:
        try:
            e = clc.APIFailedResponse("Response code %s. %s %s %s" %
                                      (r.status_code, r.json()['message'], method, fq_url))
            e.response_status_code = r.status_code
            e.response_json = r.json()
            e.response_text = r.text
            raise(e)
        except clc.APIFailedResponse:
            raise
        except:
            # Body was not JSON — fall back to raw text in the error message.
            e = clc.APIFailedResponse("Response code %s. %s. %s %s" %
                                      (r.status_code, r.text, method, fq_url))
            e.response_status_code = r.status_code
            e.response_json = {}  # or should this be None?
            e.response_text = r.text
            raise(e)
6,604
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/api.py#L87-L152
[ "def", "_result_is_lyrics", "(", "self", ",", "song_title", ")", ":", "default_terms", "=", "[", "'track\\\\s?list'", ",", "'album art(work)?'", ",", "'liner notes'", ",", "'booklet'", ",", "'credits'", ",", "'interview'", ",", "'skit'", ",", "'instrumental'", ",", "'setlist'", "]", "if", "self", ".", "excluded_terms", ":", "if", "self", ".", "replace_default_terms", ":", "default_terms", "=", "self", ".", "excluded_terms", "else", ":", "default_terms", ".", "extend", "(", "self", ".", "excluded_terms", ")", "expression", "=", "r\"\"", ".", "join", "(", "[", "\"({})|\"", ".", "format", "(", "term", ")", "for", "term", "in", "default_terms", "]", ")", ".", "strip", "(", "'|'", ")", "regex", "=", "re", ".", "compile", "(", "expression", ",", "re", ".", "IGNORECASE", ")", "return", "not", "regex", ".", "search", "(", "self", ".", "_clean_str", "(", "song_title", ")", ")" ]
Returns the external references of the element
def get_external_references(self):
    """Returns the external references of the element (generator).

    Yields nothing when the element has no <externalReferences> sublayer.
    """
    ext_node = self.node.find('externalReferences')
    if ext_node is None:
        return
    for reference in CexternalReferences(ext_node):
        yield reference
6,605
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/srl_data.py#L102-L112
[ "def", "config_diff", "(", "args", ")", ":", "config_1", "=", "config_get", "(", "args", ")", ".", "splitlines", "(", ")", "args", ".", "project", "=", "args", ".", "Project", "args", ".", "workspace", "=", "args", ".", "Workspace", "cfg_1_name", "=", "args", ".", "config", "if", "args", ".", "Config", "is", "not", "None", ":", "args", ".", "config", "=", "args", ".", "Config", "if", "args", ".", "Namespace", "is", "not", "None", ":", "args", ".", "namespace", "=", "args", ".", "Namespace", "config_2", "=", "config_get", "(", "args", ")", ".", "splitlines", "(", ")", "if", "not", "args", ".", "verbose", ":", "config_1", "=", "skip_cfg_ver", "(", "config_1", ")", "config_2", "=", "skip_cfg_ver", "(", "config_2", ")", "return", "list", "(", "unified_diff", "(", "config_1", ",", "config_2", ",", "cfg_1_name", ",", "args", ".", "config", ",", "lineterm", "=", "''", ")", ")" ]
Adds an external reference to the role
def add_external_reference(self, ext_ref):
    """Adds an external reference to the role.

    Creates the <externalReferences> sublayer under the role node if it
    does not exist yet, then appends *ext_ref* to it.

    :param ext_ref: the external reference object to add
    """
    # check if the externalReferences sublayer exists for the role, create it if not
    node_ext_refs = self.node.find('externalReferences')
    if node_ext_refs is None:  # was `== None`; identity check is the safe form for element nodes
        ext_refs = CexternalReferences()
        self.node.append(ext_refs.get_node())
    else:
        ext_refs = CexternalReferences(node_ext_refs)
    ext_refs.add_external_reference(ext_ref)
6,606
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/srl_data.py#L114-L129
[ "def", "save_all_ggr_logs", "(", "cls", ",", "test_name", ",", "test_passed", ")", ":", "log_name", "=", "'{} [driver {}]'", "if", "len", "(", "cls", ".", "driver_wrappers", ")", ">", "1", "else", "'{}'", "driver_index", "=", "1", "for", "driver_wrapper", "in", "cls", ".", "driver_wrappers", ":", "if", "not", "driver_wrapper", ".", "driver", "or", "driver_wrapper", ".", "server_type", "not", "in", "[", "'ggr'", ",", "'selenoid'", "]", ":", "continue", "try", ":", "if", "driver_wrapper", ".", "config", ".", "getboolean_optional", "(", "'Server'", ",", "'logs_enabled'", ")", "or", "not", "test_passed", ":", "name", "=", "get_valid_filename", "(", "log_name", ".", "format", "(", "test_name", ",", "driver_index", ")", ")", "Selenoid", "(", "driver_wrapper", ")", ".", "download_session_log", "(", "name", ")", "except", "Exception", "as", "exc", ":", "# Capture exceptions to avoid errors in teardown method due to session timeouts", "driver_wrapper", ".", "logger", ".", "warn", "(", "'Error downloading GGR logs: %s'", "%", "exc", ")", "driver_index", "+=", "1" ]
Removes any external reference from the role
def remove_external_references(self):
    """Removes any external reference from the role."""
    for stale in self.node.findall('externalReferences'):
        self.node.remove(stale)
6,607
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/srl_data.py#L132-L137
[ "def", "save_all_ggr_logs", "(", "cls", ",", "test_name", ",", "test_passed", ")", ":", "log_name", "=", "'{} [driver {}]'", "if", "len", "(", "cls", ".", "driver_wrappers", ")", ">", "1", "else", "'{}'", "driver_index", "=", "1", "for", "driver_wrapper", "in", "cls", ".", "driver_wrappers", ":", "if", "not", "driver_wrapper", ".", "driver", "or", "driver_wrapper", ".", "server_type", "not", "in", "[", "'ggr'", ",", "'selenoid'", "]", ":", "continue", "try", ":", "if", "driver_wrapper", ".", "config", ".", "getboolean_optional", "(", "'Server'", ",", "'logs_enabled'", ")", "or", "not", "test_passed", ":", "name", "=", "get_valid_filename", "(", "log_name", ".", "format", "(", "test_name", ",", "driver_index", ")", ")", "Selenoid", "(", "driver_wrapper", ")", ".", "download_session_log", "(", "name", ")", "except", "Exception", "as", "exc", ":", "# Capture exceptions to avoid errors in teardown method due to session timeouts", "driver_wrapper", ".", "logger", ".", "warn", "(", "'Error downloading GGR logs: %s'", "%", "exc", ")", "driver_index", "+=", "1" ]
Removes any external references on any of the roles from the predicate
def remove_external_references_from_roles(self):
    """Removes any external references on any of the roles from the predicate."""
    for role_node in self.node.findall('role'):
        Crole(role_node).remove_external_references()
6,608
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/srl_data.py#L290-L297
[ "def", "template", "(", "client", ",", "src", ",", "dest", ",", "paths", ",", "opt", ")", ":", "key_map", "=", "cli_hash", "(", "opt", ".", "key_map", ")", "obj", "=", "{", "}", "for", "path", "in", "paths", ":", "response", "=", "client", ".", "read", "(", "path", ")", "if", "not", "response", ":", "raise", "aomi", ".", "exceptions", ".", "VaultData", "(", "\"Unable to retrieve %s\"", "%", "path", ")", "if", "is_aws", "(", "response", "[", "'data'", "]", ")", "and", "'sts'", "not", "in", "path", ":", "renew_secret", "(", "client", ",", "response", ",", "opt", ")", "for", "s_k", ",", "s_v", "in", "response", "[", "'data'", "]", ".", "items", "(", ")", ":", "o_key", "=", "s_k", "if", "s_k", "in", "key_map", ":", "o_key", "=", "key_map", "[", "s_k", "]", "k_name", "=", "secret_key_name", "(", "path", ",", "o_key", ",", "opt", ")", ".", "lower", "(", ")", ".", "replace", "(", "'-'", ",", "'_'", ")", "obj", "[", "k_name", "]", "=", "s_v", "template_obj", "=", "blend_vars", "(", "obj", ",", "opt", ")", "output", "=", "render", "(", "grok_template_file", "(", "src", ")", ",", "template_obj", ")", "write_raw_file", "(", "output", ",", "abspath", "(", "dest", ")", ")" ]
Adds a list of roles to the predicate
def add_roles(self, list_of_roles):
    """Adds a list of roles to the predicate.

    :param list_of_roles: iterable of role objects exposing get_node()
    """
    for role in list_of_roles:
        self.node.append(role.get_node())
6,609
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/srl_data.py#L308-L316
[ "def", "rekey", "(", "self", ",", "key", ",", "nonce", "=", "None", ",", "recovery_key", "=", "False", ")", ":", "params", "=", "{", "'key'", ":", "key", ",", "}", "if", "nonce", "is", "not", "None", ":", "params", "[", "'nonce'", "]", "=", "nonce", "api_path", "=", "'/v1/sys/rekey/update'", "if", "recovery_key", ":", "api_path", "=", "'/v1/sys/rekey-recovery-key/update'", "response", "=", "self", ".", "_adapter", ".", "put", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")", "return", "response", ".", "json", "(", ")" ]
Add a role to the predicate
def add_role(self, role_obj):
    """Add a role to the predicate.

    :param role_obj: role object exposing get_node()
    """
    self.node.append(role_obj.get_node())
6,610
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/srl_data.py#L318-L325
[ "def", "create_or_update_secret", "(", "self", ",", "path", ",", "secret", ",", "cas", "=", "None", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "'options'", ":", "{", "}", ",", "'data'", ":", "secret", ",", "}", "if", "cas", "is", "not", "None", ":", "params", "[", "'options'", "]", "[", "'cas'", "]", "=", "cas", "api_path", "=", "'/v1/{mount_point}/data/{path}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "path", "=", "path", ")", "response", "=", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")", "return", "response", ".", "json", "(", ")" ]
Adds an external reference to a role identifier
def add_external_reference_to_role(self, role_id, ext_ref):
    """Adds an external reference to a role identifier.

    :param role_id: key into self.map_roleid_node (raises KeyError if absent)
    :param ext_ref: the external reference object to add
    """
    Crole(self.map_roleid_node[role_id]).add_external_reference(ext_ref)
6,611
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/srl_data.py#L377-L387
[ "def", "start", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "is_running", "(", ")", ":", "self", ".", "websock_url", "=", "self", ".", "chrome", ".", "start", "(", "*", "*", "kwargs", ")", "self", ".", "websock", "=", "websocket", ".", "WebSocketApp", "(", "self", ".", "websock_url", ")", "self", ".", "websock_thread", "=", "WebsockReceiverThread", "(", "self", ".", "websock", ",", "name", "=", "'WebsockThread:%s'", "%", "self", ".", "chrome", ".", "port", ")", "self", ".", "websock_thread", ".", "start", "(", ")", "self", ".", "_wait_for", "(", "lambda", ":", "self", ".", "websock_thread", ".", "is_open", ",", "timeout", "=", "30", ")", "# tell browser to send us messages we're interested in", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Page.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Console.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Runtime.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.setForceUpdateOnPageLoad'", ")", "# disable google analytics", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.setBlockedURLs'", ",", "params", "=", "{", "'urls'", ":", "[", "'*google-analytics.com/analytics.js'", ",", "'*google-analytics.com/ga.js'", "]", "}", ")" ]
Adds a predicate object to the layer
def add_predicate(self, pred_obj):
    """Adds a predicate object to the layer.

    Ignores (with a printed error) any predicate whose id is already
    registered in self.idx.
    """
    pred_id = pred_obj.get_id()
    if pred_id in self.idx:
        # FIXME we want new id rather than ignoring the element
        print('Error: trying to add new element, but id has already been given')
        return
    pred_node = pred_obj.get_node()
    self.node.append(pred_node)
    self.idx[pred_id] = pred_node
6,612
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/srl_data.py#L390-L403
[ "def", "get_val", "(", "dataset", ",", "timestamp", "=", "None", ")", ":", "if", "dataset", ".", "type", "==", "'array'", ":", "#TODO: design a mechansim to retrieve this data if it's stored externally", "return", "json", ".", "loads", "(", "dataset", ".", "value", ")", "elif", "dataset", ".", "type", "==", "'descriptor'", ":", "return", "str", "(", "dataset", ".", "value", ")", "elif", "dataset", ".", "type", "==", "'scalar'", ":", "return", "Decimal", "(", "str", "(", "dataset", ".", "value", ")", ")", "elif", "dataset", ".", "type", "==", "'timeseries'", ":", "#TODO: design a mechansim to retrieve this data if it's stored externally", "val", "=", "dataset", ".", "value", "seasonal_year", "=", "config", ".", "get", "(", "'DEFAULT'", ",", "'seasonal_year'", ",", "'1678'", ")", "seasonal_key", "=", "config", ".", "get", "(", "'DEFAULT'", ",", "'seasonal_key'", ",", "'9999'", ")", "val", "=", "dataset", ".", "value", ".", "replace", "(", "seasonal_key", ",", "seasonal_year", ")", "timeseries", "=", "pd", ".", "read_json", "(", "val", ",", "convert_axes", "=", "True", ")", "if", "timestamp", "is", "None", ":", "return", "timeseries", "else", ":", "try", ":", "idx", "=", "timeseries", ".", "index", "#Seasonal timeseries are stored in the year", "#1678 (the lowest year pandas allows for valid times).", "#Therefore if the timeseries is seasonal,", "#the request must be a seasonal request, not a", "#standard request", "if", "type", "(", "idx", ")", "==", "pd", ".", "DatetimeIndex", ":", "if", "set", "(", "idx", ".", "year", ")", "==", "set", "(", "[", "int", "(", "seasonal_year", ")", "]", ")", ":", "if", "isinstance", "(", "timestamp", ",", "list", ")", ":", "seasonal_timestamp", "=", "[", "]", "for", "t", "in", "timestamp", ":", "t_1900", "=", "t", ".", "replace", "(", "year", "=", "int", "(", "seasonal_year", ")", ")", "seasonal_timestamp", ".", "append", "(", "t_1900", ")", "timestamp", "=", "seasonal_timestamp", "else", ":", "timestamp", "=", "[", "timestamp", 
".", "replace", "(", "year", "=", "int", "(", "seasonal_year", ")", ")", "]", "pandas_ts", "=", "timeseries", ".", "reindex", "(", "timestamp", ",", "method", "=", "'ffill'", ")", "#If there are no values at all, just return None", "if", "len", "(", "pandas_ts", ".", "dropna", "(", ")", ")", "==", "0", ":", "return", "None", "#Replace all numpy NAN values with None", "pandas_ts", "=", "pandas_ts", ".", "where", "(", "pandas_ts", ".", "notnull", "(", ")", ",", "None", ")", "val_is_array", "=", "False", "if", "len", "(", "pandas_ts", ".", "columns", ")", ">", "1", ":", "val_is_array", "=", "True", "if", "val_is_array", ":", "if", "type", "(", "timestamp", ")", "is", "list", "and", "len", "(", "timestamp", ")", "==", "1", ":", "ret_val", "=", "pandas_ts", ".", "loc", "[", "timestamp", "[", "0", "]", "]", ".", "values", ".", "tolist", "(", ")", "else", ":", "ret_val", "=", "pandas_ts", ".", "loc", "[", "timestamp", "]", ".", "values", ".", "tolist", "(", ")", "else", ":", "col_name", "=", "pandas_ts", ".", "loc", "[", "timestamp", "]", ".", "columns", "[", "0", "]", "if", "type", "(", "timestamp", ")", "is", "list", "and", "len", "(", "timestamp", ")", "==", "1", ":", "ret_val", "=", "pandas_ts", ".", "loc", "[", "timestamp", "[", "0", "]", "]", ".", "loc", "[", "col_name", "]", "else", ":", "ret_val", "=", "pandas_ts", ".", "loc", "[", "timestamp", "]", "[", "col_name", "]", ".", "values", ".", "tolist", "(", ")", "return", "ret_val", "except", "Exception", "as", "e", ":", "log", ".", "critical", "(", "\"Unable to retrive data. Check timestamps.\"", ")", "log", ".", "critical", "(", "e", ")" ]
show a value associated with an attribute for each DataProperty instance in the dp_matrix
def display_dp_matrix_attr(dp_matrix, attr_name):
    """Show the value of *attr_name* for each DataProperty instance in *dp_matrix*.

    Prints a header followed by one list of attribute values per row.
    """
    print()
    print("---------- {:s} ----------".format(attr_name))
    for row in dp_matrix:
        print([getattr(cell, attr_name) for cell in row])
6,613
https://github.com/thombashi/DataProperty/blob/1d1a4c6abee87264c2f870a932c0194895d80a18/examples/py/to_dp_matrix.py#L16-L25
[ "def", "console_wait_for_keypress", "(", "flush", ":", "bool", ")", "->", "Key", ":", "key", "=", "Key", "(", ")", "lib", ".", "TCOD_console_wait_for_keypress_wrapper", "(", "key", ".", "key_p", ",", "flush", ")", "return", "key" ]
Executes the specified sql query and returns the cursor
def _query(self, sql, *args):
    """Executes the specified sql query and returns the cursor.

    Lazily opens the sqlite connection on first use; collapses internal
    whitespace in *sql* before execution.  Wraps sqlite errors in
    InvalidFormatError.
    """
    if not self._con:
        logger.debug(("Open MBTiles file '%s'") % self.filename)
        self._con = sqlite3.connect(self.filename)
        self._cur = self._con.cursor()
    sql = ' '.join(sql.split())
    logger.debug(("Execute query '%s' %s") % (sql, args))
    try:
        self._cur.execute(sql, *args)
    except (sqlite3.OperationalError, sqlite3.DatabaseError) as exc:
        raise InvalidFormatError(("%s while reading %s") % (exc, self.filename))
    return self._cur
6,614
https://github.com/kamicut/tilepie/blob/103ae2be1c3c4e6f7ec4a3bdd265ffcddee92b96/tilepie/reader.py#L33-L45
[ "def", "add", "(", "self", ",", "extension", ")", ":", "index", "=", "len", "(", "self", ".", "extensions", ")", "self", ".", "extensions", "[", "index", "]", "=", "extension", "for", "protocol", "in", "extension", ".", "protocols", ":", "self", ".", "registry", "[", "protocol", "]", "=", "index" ]
Sets the comment for the element
def set_comment(self, c):
    """Sets the comment for the element.

    Hyphens are stripped because '--' is illegal inside an XML comment;
    the text is padded with single spaces.
    """
    sanitized = ' ' + c.replace('-', '').strip() + ' '
    self.node.insert(0, etree.Comment(sanitized))
6,615
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/opinion_data.py#L47-L54
[ "def", "fetcher_factory", "(", "conf", ")", ":", "global", "PROMOTERS", "applicable", "=", "[", "]", "if", "not", "PROMOTERS", ":", "PROMOTERS", "=", "load_promoters", "(", ")", "for", "promoter", "in", "PROMOTERS", ":", "if", "promoter", ".", "is_applicable", "(", "conf", ")", ":", "applicable", ".", "append", "(", "(", "promoter", ".", "PRIORITY", ",", "promoter", ")", ")", "if", "applicable", ":", "best_match", "=", "sorted", "(", "applicable", ",", "reverse", "=", "True", ")", "[", "0", "]", "[", "1", "]", "return", "best_match", "(", "conf", ")", "else", ":", "raise", "ConfigurationError", "(", "'No fetcher is applicable for \"{0}\"'", ".", "format", "(", "conf", "[", "'name'", "]", ")", ")" ]
Sets the opinion identifier
def set_id(self, my_id):
    """Sets the opinion identifier ('id' attribute in NAF, 'oid' in KAF)."""
    attr_by_type = {'NAF': 'id', 'KAF': 'oid'}
    attr = attr_by_type.get(self.type)
    if attr is not None:
        self.node.set(attr, my_id)
6,616
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/opinion_data.py#L325-L334
[ "def", "finish", "(", "self", ")", ":", "log", ".", "debug", "(", "\"Session disconnected.\"", ")", "try", ":", "self", ".", "sock", ".", "shutdown", "(", "socket", ".", "SHUT_RDWR", ")", "except", ":", "pass", "self", ".", "session_end", "(", ")" ]
Converts the opinion layer to KAF
def to_kaf(self):
    """Converts the opinion layer to KAF (renames 'id' attributes to 'oid')."""
    if self.type != 'NAF':
        return
    for opinion_node in self.__get_opinion_nodes():
        opinion_node.set('oid', opinion_node.get('id'))
        del opinion_node.attrib['id']
6,617
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/opinion_data.py#L453-L460
[ "def", "_ParseWtmp", "(", ")", ":", "users", "=", "{", "}", "wtmp_struct_size", "=", "UtmpStruct", ".", "GetSize", "(", ")", "filenames", "=", "glob", ".", "glob", "(", "\"/var/log/wtmp*\"", ")", "+", "[", "\"/var/run/utmp\"", "]", "for", "filename", "in", "filenames", ":", "try", ":", "wtmp", "=", "open", "(", "filename", ",", "\"rb\"", ")", ".", "read", "(", ")", "except", "IOError", ":", "continue", "for", "offset", "in", "range", "(", "0", ",", "len", "(", "wtmp", ")", ",", "wtmp_struct_size", ")", ":", "try", ":", "record", "=", "UtmpStruct", "(", "wtmp", "[", "offset", ":", "offset", "+", "wtmp_struct_size", "]", ")", "except", "utils", ".", "ParsingError", ":", "break", "# Users only appear for USER_PROCESS events, others are system.", "if", "record", ".", "ut_type", "!=", "7", ":", "continue", "try", ":", "if", "users", "[", "record", ".", "ut_user", "]", "<", "record", ".", "tv_sec", ":", "users", "[", "record", ".", "ut_user", "]", "=", "record", ".", "tv_sec", "except", "KeyError", ":", "users", "[", "record", ".", "ut_user", "]", "=", "record", ".", "tv_sec", "return", "users" ]
Converts the opinion layer to NAF
def to_naf(self):
    """Converts the opinion layer to NAF (renames 'oid' attributes to 'id')."""
    if self.type != 'KAF':
        return
    for opinion_node in self.__get_opinion_nodes():
        opinion_node.set('id', opinion_node.get('oid'))
        del opinion_node.attrib['oid']
6,618
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/opinion_data.py#L462-L469
[ "def", "_remove_brackets", "(", "x", ",", "i", ")", ":", "assert", "x", "[", "i", "]", "[", "'t'", "]", "==", "'Cite'", "assert", "i", ">", "0", "and", "i", "<", "len", "(", "x", ")", "-", "1", "# Check if the surrounding elements are strings", "if", "not", "x", "[", "i", "-", "1", "]", "[", "'t'", "]", "==", "x", "[", "i", "+", "1", "]", "[", "'t'", "]", "==", "'Str'", ":", "return", "# Trim off curly brackets", "if", "x", "[", "i", "-", "1", "]", "[", "'c'", "]", ".", "endswith", "(", "'{'", ")", "and", "x", "[", "i", "+", "1", "]", "[", "'c'", "]", ".", "startswith", "(", "'}'", ")", ":", "if", "len", "(", "x", "[", "i", "+", "1", "]", "[", "'c'", "]", ")", ">", "1", ":", "x", "[", "i", "+", "1", "]", "[", "'c'", "]", "=", "x", "[", "i", "+", "1", "]", "[", "'c'", "]", "[", "1", ":", "]", "else", ":", "del", "x", "[", "i", "+", "1", "]", "if", "len", "(", "x", "[", "i", "-", "1", "]", "[", "'c'", "]", ")", ">", "1", ":", "x", "[", "i", "-", "1", "]", "[", "'c'", "]", "=", "x", "[", "i", "-", "1", "]", "[", "'c'", "]", "[", ":", "-", "1", "]", "else", ":", "del", "x", "[", "i", "-", "1", "]" ]
Removes the opinion for the given opinion identifier
def remove_this_opinion(self, opinion_id):
    """Removes the opinion for the given opinion identifier.

    At most one opinion is removed; no-op when the id is not found.
    """
    for opinion in self.get_opinions():
        if opinion.get_id() != opinion_id:
            continue
        self.node.remove(opinion.get_node())
        break
6,619
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/opinion_data.py#L488-L497
[ "def", "disconnect", "(", "self", ")", ":", "_LOGGING", ".", "debug", "(", "'Disconnecting from stream: %s'", ",", "self", ".", "name", ")", "self", ".", "kill_thrd", ".", "set", "(", ")", "self", ".", "thrd", ".", "join", "(", ")", "_LOGGING", ".", "debug", "(", "'Event stream thread for %s is stopped'", ",", "self", ".", "name", ")", "self", ".", "kill_thrd", ".", "clear", "(", ")" ]
Return account details dict associated with the provided alias .
def GetAccountDetails(alias=None):
    """Return account details dict associated with the provided alias.

    :param alias: account alias; when falsy, the default alias from
                  Account.GetAlias() is used.
    :return: the 'AccountDetails' dict with 'Status' translated to a string,
             when the call succeeds with StatusCode 0.
    :raises Exception: when the API reports failure.
    """
    if not alias:
        alias = Account.GetAlias()
    r = clc.v1.API.Call('post', 'Account/GetAccountDetails', {'AccountAlias': alias})
    if r['Success'] != True:
        if clc.args:
            clc.v1.output.Status('ERROR', 3, 'Error calling %s. Status code %s. %s' % ('Account/GetAccountDetails', r['StatusCode'], r['Message']))
        raise Exception('Error calling %s. Status code %s. %s' % ('Account/GetAccountDetails', r['StatusCode'], r['Message']))
    elif int(r['StatusCode']) == 0:
        # Translate the numeric status code to its human-readable name.
        r['AccountDetails']['Status'] = Account.account_status_itos[r['AccountDetails']['Status']]
        return(r['AccountDetails'])
6,620
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/account.py#L31-L40
[ "def", "start_transmit", "(", "self", ",", "blocking", "=", "False", ",", "start_packet_groups", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "start_packet_groups", ":", "port_list_for_packet_groups", "=", "self", ".", "ports", ".", "values", "(", ")", "port_list_for_packet_groups", "=", "self", ".", "set_ports_list", "(", "*", "port_list_for_packet_groups", ")", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartTransmit {}'", ".", "format", "(", "port_list", ")", ")", "time", ".", "sleep", "(", "0.2", ")", "if", "blocking", ":", "self", ".", "wait_transmit", "(", "*", "ports", ")" ]
Return account inventory dict containing all subaccounts for the given alias . If None search from default alias .
def GetAccounts(alias=None):
    """Return account inventory dict containing all subaccounts for the given alias.

    :param alias: account alias; when None, searches from the default alias.
    :return: list of account dicts when StatusCode is 0; also seeds the
             module-level clc.ALIAS / clc.LOCATION defaults if unset.
    """
    if alias is not None:
        payload = {'AccountAlias': alias}
    else:
        payload = {}
    r = clc.v1.API.Call('post', 'Account/GetAccounts', payload)
    if int(r['StatusCode']) == 0:
        # Assume first response is always the original account. Not sure if this is reliable
        if not clc.ALIAS:
            clc.ALIAS = r['Accounts'][0]['AccountAlias']
        if not clc.LOCATION:
            clc.LOCATION = r['Accounts'][0]['Location']
        return(r['Accounts'])
6,621
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/account.py#L56-L66
[ "def", "handle_input", "(", "self", ")", ":", "difference", "=", "self", ".", "check_state", "(", ")", "if", "not", "difference", ":", "return", "self", ".", "events", "=", "[", "]", "self", ".", "handle_new_events", "(", "difference", ")", "self", ".", "update_timeval", "(", ")", "self", ".", "events", ".", "append", "(", "self", ".", "sync_marker", "(", "self", ".", "timeval", ")", ")", "self", ".", "write_to_pipe", "(", "self", ".", "events", ")" ]
Assure that a project directory has a cache folder . If not it will create it .
def assure_cache(project_path=None):
    """Assure that a project directory has a cache folder; create it if missing.

    :param project_path: project directory (validated via path(..., ISDIR))
    """
    validated = path(project_path, ISDIR)
    target = os.path.join(validated, CACHE_NAME)
    if not os.path.isdir(target):
        os.mkdir(target)
6,622
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/project.py#L13-L23
[ "def", "SetConsoleTextAttribute", "(", "stream_id", ",", "attrs", ")", ":", "handle", "=", "handles", "[", "stream_id", "]", "return", "windll", ".", "kernel32", ".", "SetConsoleTextAttribute", "(", "handle", ",", "attrs", ")" ]
Purge a directory of anything cyther related
def purge_project ( ) : print ( 'Current Directory: {}' . format ( os . getcwd ( ) ) ) directories = os . listdir ( os . getcwd ( ) ) if CACHE_NAME in directories : response = get_input ( "Would you like to delete the cache and" "everything in it? [y/n]: " , ( 'y' , 'n' ) ) if response == 'y' : print ( "Listing local '__cythercache__':" ) cache_dir = os . path . join ( os . getcwd ( ) , "__cythercache__" ) to_delete = [ ] contents = os . listdir ( cache_dir ) if contents : for filename in contents : print ( '\t' + filename ) filepath = os . path . join ( cache_dir , filename ) to_delete . append ( filepath ) else : print ( "\tNothing was found in the cache" ) check_response = get_input ( "Delete all these files? (^)" "[y/n]: " , ( 'y' , 'n' ) ) if check_response == 'y' : for filepath in to_delete : os . remove ( filepath ) os . rmdir ( cache_dir ) else : print ( "Skipping the deletion... all files are fine!" ) else : print ( "Skipping deletion of the cache" ) else : print ( "Couldn't find a cache file ('{}') in this " "directory" . format ( CACHE_NAME ) )
6,623
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/project.py#L33-L67
[ "def", "interpoled_resampling", "(", "W", ",", "x", ")", ":", "N", "=", "W", ".", "shape", "[", "0", "]", "idx", "=", "np", ".", "argsort", "(", "x", ")", "xs", "=", "x", "[", "idx", "]", "ws", "=", "W", "[", "idx", "]", "cs", "=", "np", ".", "cumsum", "(", "avg_n_nplusone", "(", "ws", ")", ")", "u", "=", "random", ".", "rand", "(", "N", ")", "xrs", "=", "np", ".", "empty", "(", "N", ")", "where", "=", "np", ".", "searchsorted", "(", "cs", ",", "u", ")", "# costs O(N log(N)) but algorithm has O(N log(N)) complexity anyway", "for", "n", "in", "range", "(", "N", ")", ":", "m", "=", "where", "[", "n", "]", "if", "m", "==", "0", ":", "xrs", "[", "n", "]", "=", "xs", "[", "0", "]", "elif", "m", "==", "N", ":", "xrs", "[", "n", "]", "=", "xs", "[", "-", "1", "]", "else", ":", "xrs", "[", "n", "]", "=", "interpol", "(", "cs", "[", "m", "-", "1", "]", ",", "cs", "[", "m", "]", ",", "xs", "[", "m", "-", "1", "]", ",", "xs", "[", "m", "]", ",", "u", "[", "n", "]", ")", "return", "xrs" ]
a parallelized work - alike to the built - in map function
def map ( func , items , pool_size = 10 ) : with OrderedPool ( func , pool_size ) as pool : for count , item in enumerate ( items ) : pool . put ( item ) for i in xrange ( count + 1 ) : yield pool . get ( )
6,624
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/pool.py#L255-L280
[ "def", "_parseSegments", "(", "self", ",", "data", ",", "elfHeader", ")", ":", "offset", "=", "elfHeader", ".", "header", ".", "e_phoff", "segments", "=", "[", "]", "for", "i", "in", "range", "(", "elfHeader", ".", "header", ".", "e_phnum", ")", ":", "phdr", "=", "self", ".", "__classes", ".", "PHDR", ".", "from_buffer", "(", "data", ",", "offset", ")", "segment_bytes", "=", "(", "c_ubyte", "*", "phdr", ".", "p_filesz", ")", ".", "from_buffer", "(", "data", ",", "phdr", ".", "p_offset", ")", "phdrData", "=", "PhdrData", "(", "header", "=", "phdr", ",", "raw", "=", "segment_bytes", ",", "bytes", "=", "bytearray", "(", "segment_bytes", ")", ",", "type", "=", "PT", "[", "phdr", ".", "p_type", "]", ",", "vaddr", "=", "phdr", ".", "p_vaddr", ",", "offset", "=", "phdr", ".", "p_offset", ")", "segments", ".", "append", "(", "phdrData", ")", "offset", "+=", "elfHeader", ".", "header", ".", "e_phentsize", "return", "segments" ]
start the pool s workers
def start ( self ) : for i in xrange ( self . size ) : scheduler . schedule ( self . _runner ) self . _closing = False
6,625
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/pool.py#L36-L40
[ "def", "_results_tc_args", "(", "self", ")", ":", "results", "=", "[", "]", "if", "os", ".", "access", "(", "self", ".", "default_args", ".", "tc_out_path", ",", "os", ".", "W_OK", ")", ":", "result_file", "=", "'{}/results.tc'", ".", "format", "(", "self", ".", "default_args", ".", "tc_out_path", ")", "else", ":", "result_file", "=", "'results.tc'", "if", "os", ".", "path", ".", "isfile", "(", "result_file", ")", ":", "with", "open", "(", "result_file", ",", "'r'", ")", "as", "rh", ":", "results", "=", "rh", ".", "read", "(", ")", ".", "strip", "(", ")", ".", "split", "(", "'\\n'", ")", "os", ".", "remove", "(", "result_file", ")", "for", "line", "in", "results", ":", "if", "not", "line", "or", "' = '", "not", "in", "line", ":", "continue", "key", ",", "value", "=", "line", ".", "split", "(", "' = '", ")", "if", "value", "==", "'true'", ":", "value", "=", "True", "elif", "value", "==", "'false'", ":", "value", "=", "False", "elif", "not", "value", ":", "value", "=", "None", "setattr", "(", "self", ".", "_default_args", ",", "key", ",", "value", ")" ]
place a new item into the pool to be handled by the workers
def put ( self , * args , * * kwargs ) : self . inq . put ( ( self . _putcount , ( args , kwargs ) ) ) self . _putcount += 1
6,626
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/pool.py#L207-L214
[ "def", "_check_registry_type", "(", "folder", "=", "None", ")", ":", "folder", "=", "_registry_folder", "(", "folder", ")", "default_file", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "'registry_type.txt'", ")", "try", ":", "with", "open", "(", "default_file", ",", "\"r\"", ")", "as", "infile", ":", "data", "=", "infile", ".", "read", "(", ")", "data", "=", "data", ".", "strip", "(", ")", "ComponentRegistry", ".", "SetBackingStore", "(", "data", ")", "except", "IOError", ":", "pass" ]
Returns the entity object for the given entity identifier
def get_entity ( self , entity_id ) : entity_node = self . map_entity_id_to_node . get ( entity_id ) if entity_node is not None : return Centity ( node = entity_node , type = self . type ) else : for entity_node in self . __get_entity_nodes ( ) : if self . type == 'NAF' : label_id = 'id' elif self . type == 'KAF' : label_id = 'eid' if entity_node . get ( label_id ) == entity_id : return Centity ( node = entity_node , type = self . type ) return None
6,627
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/entity_data.py#L181-L200
[ "def", "create_new_connection", "(", "from_port", ",", "to_port", ")", ":", "from", "rafcon", ".", "gui", ".", "mygaphas", ".", "items", ".", "ports", "import", "ScopedVariablePortView", ",", "LogicPortView", ",", "DataPortView", "if", "isinstance", "(", "from_port", ",", "LogicPortView", ")", "and", "isinstance", "(", "to_port", ",", "LogicPortView", ")", ":", "return", "add_transition_to_state", "(", "from_port", ",", "to_port", ")", "elif", "isinstance", "(", "from_port", ",", "(", "DataPortView", ",", "ScopedVariablePortView", ")", ")", "and", "isinstance", "(", "to_port", ",", "(", "DataPortView", ",", "ScopedVariablePortView", ")", ")", ":", "return", "add_data_flow_to_state", "(", "from_port", ",", "to_port", ")", "# Both ports are not None", "elif", "from_port", "and", "to_port", ":", "logger", ".", "error", "(", "\"Connection of non-compatible ports: {0} and {1}\"", ".", "format", "(", "type", "(", "from_port", ")", ",", "type", "(", "to_port", ")", ")", ")", "return", "False" ]
Adds an external reference to a entity specified by the entity identifier
def add_external_reference_to_entity ( self , entity_id , ext_ref ) : node_entity = self . map_entity_id_to_node . get ( entity_id ) if node_entity is not None : entity = Centity ( node_entity , self . type ) entity . add_external_reference ( ext_ref ) else : print >> sys . stderr , 'Trying to add a reference to the entity' , entity_id , 'but can not be found in this file'
6,628
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/entity_data.py#L202-L215
[ "def", "ulocalized_gmt0_time", "(", "self", ",", "time", ",", "context", ",", "request", ")", ":", "value", "=", "get_date", "(", "context", ",", "time", ")", "if", "not", "value", ":", "return", "\"\"", "# DateTime is stored with TimeZone, but DateTimeWidget omits TZ", "value", "=", "value", ".", "toZone", "(", "\"GMT+0\"", ")", "return", "self", ".", "ulocalized_time", "(", "value", ",", "context", ",", "request", ")" ]
Converts the layer from KAF to NAF
def to_kaf ( self ) : if self . type == 'NAF' : for node in self . __get_entity_nodes ( ) : node . set ( 'eid' , node . get ( 'id' ) ) del node . attrib [ 'id' ]
6,629
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/entity_data.py#L226-L233
[ "def", "beacon", "(", "config", ")", ":", "parts", "=", "psutil", ".", "disk_partitions", "(", "all", "=", "True", ")", "ret", "=", "[", "]", "for", "mounts", "in", "config", ":", "mount", "=", "next", "(", "iter", "(", "mounts", ")", ")", "# Because we're using regular expressions", "# if our mount doesn't end with a $, insert one.", "mount_re", "=", "mount", "if", "not", "mount", ".", "endswith", "(", "'$'", ")", ":", "mount_re", "=", "'{0}$'", ".", "format", "(", "mount", ")", "if", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "# mount_re comes in formatted with a $ at the end", "# can be `C:\\\\$` or `C:\\\\\\\\$`", "# re string must be like `C:\\\\\\\\` regardless of \\\\ or \\\\\\\\", "# also, psutil returns uppercase", "mount_re", "=", "re", ".", "sub", "(", "r':\\\\\\$'", ",", "r':\\\\\\\\'", ",", "mount_re", ")", "mount_re", "=", "re", ".", "sub", "(", "r':\\\\\\\\\\$'", ",", "r':\\\\\\\\'", ",", "mount_re", ")", "mount_re", "=", "mount_re", ".", "upper", "(", ")", "for", "part", "in", "parts", ":", "if", "re", ".", "match", "(", "mount_re", ",", "part", ".", "mountpoint", ")", ":", "_mount", "=", "part", ".", "mountpoint", "try", ":", "_current_usage", "=", "psutil", ".", "disk_usage", "(", "_mount", ")", "except", "OSError", ":", "log", ".", "warning", "(", "'%s is not a valid mount point.'", ",", "_mount", ")", "continue", "current_usage", "=", "_current_usage", ".", "percent", "monitor_usage", "=", "mounts", "[", "mount", "]", "if", "'%'", "in", "monitor_usage", ":", "monitor_usage", "=", "re", ".", "sub", "(", "'%'", ",", "''", ",", "monitor_usage", ")", "monitor_usage", "=", "float", "(", "monitor_usage", ")", "if", "current_usage", ">=", "monitor_usage", ":", "ret", ".", "append", "(", "{", "'diskusage'", ":", "current_usage", ",", "'mount'", ":", "_mount", "}", ")", "return", "ret" ]
Converts the layer from NAF to KAF
def to_naf ( self ) : if self . type == 'KAF' : for node in self . __get_entity_nodes ( ) : node . set ( 'id' , node . get ( 'eid' ) ) del node . attrib [ 'eid' ]
6,630
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/entity_data.py#L235-L242
[ "def", "__validate_enrollment_periods", "(", "self", ",", "enrollments", ")", ":", "for", "a", ",", "b", "in", "itertools", ".", "combinations", "(", "enrollments", ",", "2", ")", ":", "max_start", "=", "max", "(", "a", ".", "start", ",", "b", ".", "start", ")", "min_end", "=", "min", "(", "a", ".", "end", ",", "b", ".", "end", ")", "if", "max_start", "<", "min_end", ":", "msg", "=", "\"invalid GrimoireLab enrollment dates. \"", "\"Organization dates overlap.\"", "raise", "InvalidFormatError", "(", "cause", "=", "msg", ")", "return", "enrollments" ]
Hlavni update metoda . Cinny kod pro gaussovske filtrovani prahovani binarni uzavreni a otevreni a vraceni nejvetsich nebo oznacenych objektu .
def updateImage ( self , val ) : # import ipdb # ipdb.set_trace() # Filtrovani # Zjisteni jakou sigmu pouzit if ( self . firstRun == True and self . inputSigma >= 0 ) : sigma = np . round ( self . inputSigma , 2 ) elif self . interactivity : sigma = np . round ( self . ssigma . val , 2 ) else : sigma = np . round ( self . inputSigma , 2 ) # Prahovani (smin, smax) # max_threshold = self.threshold_upper # min_threshold = self.threshold if self . interactivity : self . smin . val = ( np . round ( self . smin . val , 2 ) ) self . smin . valtext . set_text ( '{}' . format ( self . smin . val ) ) self . smax . val = ( np . round ( self . smax . val , 2 ) ) self . smax . valtext . set_text ( '{}' . format ( self . smax . val ) ) self . threshold = self . smin . val self . threshold_upper = self . smax . val closeNum = int ( np . round ( self . sclose . val , 0 ) ) openNum = int ( np . round ( self . sopen . val , 0 ) ) self . sclose . valtext . set_text ( '{}' . format ( closeNum ) ) self . sopen . valtext . set_text ( '{}' . format ( openNum ) ) else : closeNum = self . ICBinaryClosingIterations openNum = self . ICBinaryOpeningIterations # make_image_processing(sigma, min_threshold, max_threshold, closeNum, openNum, auto_method=self.) self . imgFiltering , self . threshold = make_image_processing ( data = self . data , voxelsize_mm = self . voxelsize_mm , seeds = self . seeds , sigma_mm = sigma , min_threshold = self . threshold , max_threshold = self . threshold_upper , closeNum = closeNum , openNum = openNum , min_threshold_auto_method = self . auto_method , fill_holes = self . fillHoles , get_priority_objects = self . get_priority_objects , nObj = self . nObj ) # Vykresleni dat if ( self . interactivity == True ) : self . drawVisualization ( ) # Nastaveni kontrolnich hodnot self . firstRun = False garbage . collect ( ) self . debugInfo ( )
6,631
https://github.com/mjirik/imtools/blob/eb29fa59df0e0684d8334eb3bc5ef36ea46d1d3a/imtools/uiThreshold.py#L413-L479
[ "def", "exception", "(", "self", ",", "url", ",", "exception", ")", ":", "return", "(", "time", ".", "time", "(", ")", "+", "self", ".", "ttl", ",", "self", ".", "factory", "(", "url", ")", ")" ]
Gets a list of anti - affinity policies within a given account .
def GetAll ( alias = None , location = None , session = None ) : if not alias : alias = clc . v2 . Account . GetAlias ( session = session ) policies = [ ] policy_resp = clc . v2 . API . Call ( 'GET' , 'antiAffinityPolicies/%s' % alias , { } , session = session ) for k in policy_resp : r_val = policy_resp [ k ] for r in r_val : if r . get ( 'location' ) : if location and r [ 'location' ] . lower ( ) != location . lower ( ) : continue servers = [ obj [ 'id' ] for obj in r [ 'links' ] if obj [ 'rel' ] == "server" ] policies . append ( AntiAffinity ( id = r [ 'id' ] , name = r [ 'name' ] , location = r [ 'location' ] , servers = servers , session = session ) ) return ( policies )
6,632
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/anti_affinity.py#L28-L49
[ "def", "write", "(", "self", ",", "data", ")", ":", "begin", ",", "end", ",", "size", "=", "0", ",", "0", ",", "len", "(", "data", ")", "bytes_sent", "=", "0", "raw_write", "=", "super", "(", "USBRawDevice", ",", "self", ")", ".", "write", "while", "not", "end", ">", "size", ":", "begin", "=", "end", "end", "=", "begin", "+", "self", ".", "RECV_CHUNK", "bytes_sent", "+=", "raw_write", "(", "data", "[", "begin", ":", "end", "]", ")", "return", "bytes_sent" ]
Returns a list of anti - affinity policies within a specific location .
def GetLocation ( location = None , alias = None , session = None ) : if not location : location = clc . v2 . Account . GetLocation ( session = session ) return ( AntiAffinity . GetAll ( alias = alias , location = location , session = session ) )
6,633
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/anti_affinity.py#L53-L62
[ "def", "write", "(", "self", ",", "data", ")", ":", "begin", ",", "end", ",", "size", "=", "0", ",", "0", ",", "len", "(", "data", ")", "bytes_sent", "=", "0", "raw_write", "=", "super", "(", "USBRawDevice", ",", "self", ")", ".", "write", "while", "not", "end", ">", "size", ":", "begin", "=", "end", "end", "=", "begin", "+", "self", ".", "RECV_CHUNK", "bytes_sent", "+=", "raw_write", "(", "data", "[", "begin", ":", "end", "]", ")", "return", "bytes_sent" ]
Creates a new anti - affinity policy within a given account .
def Create ( name , alias = None , location = None , session = None ) : if not alias : alias = clc . v2 . Account . GetAlias ( session = session ) if not location : location = clc . v2 . Account . GetLocation ( session = session ) r = clc . v2 . API . Call ( 'POST' , 'antiAffinityPolicies/%s' % alias , json . dumps ( { 'name' : name , 'location' : location } ) , session = session ) return ( AntiAffinity ( id = r [ 'id' ] , name = r [ 'name' ] , location = r [ 'location' ] , servers = [ ] , session = session ) )
6,634
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/anti_affinity.py#L66-L82
[ "def", "write", "(", "self", ",", "data", ")", ":", "begin", ",", "end", ",", "size", "=", "0", ",", "0", ",", "len", "(", "data", ")", "bytes_sent", "=", "0", "raw_write", "=", "super", "(", "USBRawDevice", ",", "self", ")", ".", "write", "while", "not", "end", ">", "size", ":", "begin", "=", "end", "end", "=", "begin", "+", "self", ".", "RECV_CHUNK", "bytes_sent", "+=", "raw_write", "(", "data", "[", "begin", ":", "end", "]", ")", "return", "bytes_sent" ]
Change the policy s name .
def Update ( self , name ) : r = clc . v2 . API . Call ( 'PUT' , 'antiAffinityPolicies/%s/%s' % ( self . alias , self . id ) , { 'name' : name } , session = self . session ) self . name = name
6,635
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/anti_affinity.py#L115-L125
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Return an _AbstractSyntaxTreeNode with some elements defaulted .
def _node ( handler , single = None , multi = None ) : return _AbstractSyntaxTreeNode ( handler = handler , single = ( single if single else [ ] ) , multi = ( multi if multi else [ ] ) )
6,636
https://github.com/polysquare/cmake-ast/blob/431a32d595d76f1f8f993eb6ddcc79effbadff9d/cmakeast/ast_visitor.py#L14-L18
[ "def", "download", "(", "self", ",", "temp_ver", ",", "store_metadata", "=", "True", ")", ":", "dest", "=", "self", ".", "_prefixed", "(", "temp_ver", ".", "name", ")", "temp_dest", "=", "'%s.tmp'", "%", "dest", "with", "utils", ".", "LockFile", "(", "dest", "+", "'.lock'", ")", ":", "# Image was downloaded while we were waiting", "if", "os", ".", "path", ".", "exists", "(", "dest", ")", ":", "return", "temp_ver", ".", "download", "(", "temp_dest", ")", "if", "store_metadata", ":", "with", "open", "(", "'%s.metadata'", "%", "dest", ",", "'w'", ")", "as", "f", ":", "utils", ".", "json_dump", "(", "temp_ver", ".", "get_metadata", "(", ")", ",", "f", ")", "sha1", "=", "utils", ".", "get_hash", "(", "temp_dest", ")", "if", "temp_ver", ".", "get_hash", "(", ")", "!=", "sha1", ":", "raise", "RuntimeError", "(", "'Image %s does not match the expected hash %s'", "%", "(", "temp_ver", ".", "name", ",", "sha1", ",", ")", ")", "with", "open", "(", "'%s.hash'", "%", "dest", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "sha1", ")", "with", "log_utils", ".", "LogTask", "(", "'Convert image'", ",", "logger", "=", "LOGGER", ")", ":", "result", "=", "utils", ".", "run_command", "(", "[", "'qemu-img'", ",", "'convert'", ",", "'-O'", ",", "'raw'", ",", "temp_dest", ",", "dest", ",", "]", ",", ")", "os", ".", "unlink", "(", "temp_dest", ")", "if", "result", ":", "raise", "RuntimeError", "(", "result", ".", "err", ")" ]
Recursive print worker - recurses the AST and prints each node .
def _recurse ( node , * args , * * kwargs ) : node_name = node . __class__ . __name__ try : info_for_node = _NODE_INFO_TABLE [ node_name ] except KeyError : return action = kwargs [ info_for_node . handler ] depth = kwargs [ "depth" ] # Invoke action if available if action is not None : action ( node_name , node , depth ) # Recurse recurse_kwargs = kwargs kwargs [ "depth" ] = depth + 1 for single in info_for_node . single : _recurse ( getattr ( node , single ) , * args , * * recurse_kwargs ) for multi in info_for_node . multi : for statement in getattr ( node , multi ) : _recurse ( statement , * args , * * recurse_kwargs )
6,637
https://github.com/polysquare/cmake-ast/blob/431a32d595d76f1f8f993eb6ddcc79effbadff9d/cmakeast/ast_visitor.py#L43-L71
[ "def", "_get_partition_info", "(", "storage_system", ",", "device_path", ")", ":", "try", ":", "partition_infos", "=", "storage_system", ".", "RetrieveDiskPartitionInfo", "(", "devicePath", "=", "[", "device_path", "]", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "log", ".", "trace", "(", "'partition_info = %s'", ",", "partition_infos", "[", "0", "]", ")", "return", "partition_infos", "[", "0", "]" ]
Entry point for AST recursion .
def recurse ( node , * args , * * kwargs ) : # Construct a default table of actions, using action from kwargs # if it is available. These are forwarded to _recurse. fwd = dict ( ) for node_info in _NODE_INFO_TABLE . values ( ) : fwd [ node_info . handler ] = kwargs . get ( node_info . handler , None ) fwd [ "depth" ] = 0 _recurse ( node , * args , * * fwd )
6,638
https://github.com/polysquare/cmake-ast/blob/431a32d595d76f1f8f993eb6ddcc79effbadff9d/cmakeast/ast_visitor.py#L74-L83
[ "def", "update", "(", "self", ",", "id", ",", "name", "=", "None", ",", "description", "=", "None", ",", "image_url", "=", "None", ",", "office_mode", "=", "None", ",", "share", "=", "None", ",", "*", "*", "kwargs", ")", ":", "path", "=", "'{}/update'", ".", "format", "(", "id", ")", "url", "=", "utils", ".", "urljoin", "(", "self", ".", "url", ",", "path", ")", "payload", "=", "{", "'name'", ":", "name", ",", "'description'", ":", "description", ",", "'image_url'", ":", "image_url", ",", "'office_mode'", ":", "office_mode", ",", "'share'", ":", "share", ",", "}", "payload", ".", "update", "(", "kwargs", ")", "response", "=", "self", ".", "session", ".", "post", "(", "url", ",", "json", "=", "payload", ")", "return", "Group", "(", "self", ",", "*", "*", "response", ".", "data", ")" ]
Fill used labels into filename
def get_filename_filled_with_checked_labels ( self , labels = None ) : if labels is None : labels = self . slab_wg . action_check_slab_ui ( ) string_labels = imma . get_nlabels ( slab = self . slab_wg . slab , labels = labels , return_mode = "str" ) filename = self . vtk_file . format ( "-" . join ( string_labels ) ) return filename
6,639
https://github.com/mjirik/imtools/blob/eb29fa59df0e0684d8334eb3bc5ef36ea46d1d3a/imtools/show_segmentation_qt.py#L322-L329
[ "def", "waitForEvent", "(", "self", ",", "event_name", ",", "predicate", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", ":", "deadline", "=", "time", ".", "time", "(", ")", "+", "timeout", "while", "time", ".", "time", "(", ")", "<=", "deadline", ":", "# Calculate the max timeout for the next event rpc call.", "rpc_timeout", "=", "deadline", "-", "time", ".", "time", "(", ")", "if", "rpc_timeout", "<", "0", ":", "break", "# A single RPC call cannot exceed MAX_TIMEOUT.", "rpc_timeout", "=", "min", "(", "rpc_timeout", ",", "MAX_TIMEOUT", ")", "try", ":", "event", "=", "self", ".", "waitAndGet", "(", "event_name", ",", "rpc_timeout", ")", "except", "TimeoutError", ":", "# Ignoring TimeoutError since we need to throw one with a more", "# specific message.", "break", "if", "predicate", "(", "event", ")", ":", "return", "event", "raise", "TimeoutError", "(", "self", ".", "_ad", ",", "'Timed out after %ss waiting for an \"%s\" event that satisfies the '", "'predicate \"%s\".'", "%", "(", "timeout", ",", "event_name", ",", "predicate", ".", "__name__", ")", ")" ]
import and return a named module with patches applied locally only
def patched ( module_name ) : if module_name in _patchers : return _patched_copy ( module_name , _patchers [ module_name ] ) # grab the unpatched version of the module for posterity old_module = sys . modules . pop ( module_name , None ) # apply all the standard library patches we have saved = [ ( module_name , old_module ) ] for name , patch in _patchers . iteritems ( ) : new_mod = _patched_copy ( name , patch ) saved . append ( ( name , sys . modules . pop ( name ) ) ) sys . modules [ name ] = new_mod try : # import the requested module with patches in place result = __import__ ( module_name , { } , { } , module_name . rsplit ( "." , 1 ) [ 0 ] ) finally : # put all the original modules back as they were for name , old_mod in saved : if old_mod is None : sys . modules . pop ( name , None ) else : sys . modules [ name ] = old_mod return result
6,640
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/emulation/__init__.py#L26-L72
[ "def", "ParseFileEntry", "(", "self", ",", "parser_mediator", ",", "file_entry", ")", ":", "index_file_parser", "=", "ChromeCacheIndexFileParser", "(", ")", "file_object", "=", "file_entry", ".", "GetFileObject", "(", ")", "try", ":", "index_file_parser", ".", "ParseFileObject", "(", "parser_mediator", ",", "file_object", ")", "except", "(", "IOError", ",", "errors", ".", "ParseError", ")", "as", "exception", ":", "file_object", ".", "close", "(", ")", "display_name", "=", "parser_mediator", ".", "GetDisplayName", "(", ")", "raise", "errors", ".", "UnableToParseFile", "(", "'[{0:s}] unable to parse index file {1:s} with error: {2!s}'", ".", "format", "(", "self", ".", "NAME", ",", "display_name", ",", "exception", ")", ")", "# TODO: create event based on index file creation time.", "try", ":", "file_system", "=", "file_entry", ".", "GetFileSystem", "(", ")", "self", ".", "_ParseIndexTable", "(", "parser_mediator", ",", "file_system", ",", "file_entry", ",", "index_file_parser", ".", "index_table", ")", "finally", ":", "file_object", ".", "close", "(", ")" ]
apply emulation patches only for a specific context
def patched_context ( * module_names , * * kwargs ) : local = kwargs . pop ( 'local' , False ) if kwargs : raise TypeError ( "patched_context() got an unexpected keyword " + "argument %r" % kwargs . keys ( ) [ 0 ] ) patch ( * module_names ) if local : @ scheduler . local_incoming_hook @ scheduler . local_outgoing_hook def hook ( direction , target ) : { 1 : patch , 2 : unpatch } [ direction ] ( * module_names ) yield unpatch ( * module_names ) if local : scheduler . remove_local_incoming_hook ( hook ) scheduler . remove_local_outgoing_hook ( hook )
6,641
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/emulation/__init__.py#L76-L105
[ "def", "unindex_layers_with_issues", "(", "self", ",", "use_cache", "=", "False", ")", ":", "from", "hypermap", ".", "aggregator", ".", "models", "import", "Issue", ",", "Layer", ",", "Service", "from", "django", ".", "contrib", ".", "contenttypes", ".", "models", "import", "ContentType", "layer_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Layer", ")", "service_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Service", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "layer_type", ".", "id", ")", ":", "unindex_layer", "(", "issue", ".", "content_object", ".", "id", ",", "use_cache", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "service_type", ".", "id", ")", ":", "for", "layer", "in", "issue", ".", "content_object", ".", "layer_set", ".", "all", "(", ")", ":", "unindex_layer", "(", "layer", ".", "id", ",", "use_cache", ")" ]
apply monkey - patches to stdlib modules in - place
def patch ( * module_names ) : if not module_names : module_names = _patchers . keys ( ) log . info ( "monkey-patching in-place (%d modules)" % len ( module_names ) ) for module_name in module_names : if module_name not in _patchers : raise ValueError ( "'%s' is not greenhouse-patchable" % module_name ) for module_name in module_names : if module_name in sys . modules : module = sys . modules [ module_name ] else : module = __import__ ( module_name , { } , { } , module_name . rsplit ( "." , 1 ) [ 0 ] ) for attr , patch in _patchers [ module_name ] . items ( ) : setattr ( module , attr , patch )
6,642
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/emulation/__init__.py#L142-L194
[ "def", "getChargingVoltage", "(", "self", ")", ":", "command", "=", "'$GG'", "currentAndVoltage", "=", "self", ".", "sendCommand", "(", "command", ")", "volts", "=", "float", "(", "currentAndVoltage", "[", "2", "]", ")", "/", "1000", "return", "volts" ]
Decode event encoded as JSON by processor
def from_json ( data ) : parsed_data = json . loads ( data ) trigger = TriggerInfo ( parsed_data [ 'trigger' ] [ 'class' ] , parsed_data [ 'trigger' ] [ 'kind' ] , ) # extract content type, needed to decode body content_type = parsed_data [ 'content_type' ] return Event ( body = Event . decode_body ( parsed_data [ 'body' ] , content_type ) , content_type = content_type , trigger = trigger , fields = parsed_data . get ( 'fields' ) , headers = parsed_data . get ( 'headers' ) , _id = parsed_data [ 'id' ] , method = parsed_data [ 'method' ] , path = parsed_data [ 'path' ] , size = parsed_data [ 'size' ] , timestamp = datetime . datetime . utcfromtimestamp ( parsed_data [ 'timestamp' ] ) , url = parsed_data [ 'url' ] , _type = parsed_data [ 'type' ] , type_version = parsed_data [ 'type_version' ] , version = parsed_data [ 'version' ] )
6,643
https://github.com/nuclio/nuclio-sdk-py/blob/5af9ffc19a0d96255ff430bc358be9cd7a57f424/nuclio_sdk/event.py#L74-L99
[ "def", "open", "(", "self", ",", "path", ",", "mode", "=", "'r'", ")", ":", "entry", "=", "self", ".", "find", "(", "path", ")", "if", "entry", "is", "None", ":", "if", "mode", "==", "'r'", ":", "raise", "ValueError", "(", "\"stream does not exists: %s\"", "%", "path", ")", "entry", "=", "self", ".", "create_dir_entry", "(", "path", ",", "'stream'", ",", "None", ")", "else", ":", "if", "not", "entry", ".", "isfile", "(", ")", ":", "raise", "ValueError", "(", "\"can only open stream type DirEntry's\"", ")", "if", "mode", "==", "'w'", ":", "logging", ".", "debug", "(", "\"stream: %s exists, overwriting\"", "%", "path", ")", "self", ".", "free_fat_chain", "(", "entry", ".", "sector_id", ",", "entry", ".", "byte_size", "<", "self", ".", "min_stream_max_size", ")", "entry", ".", "sector_id", "=", "None", "entry", ".", "byte_size", "=", "0", "entry", ".", "class_id", "=", "None", "elif", "mode", "==", "'rw'", ":", "pass", "s", "=", "Stream", "(", "self", ",", "entry", ",", "mode", ")", "return", "s" ]
Decode event body
def decode_body ( body , content_type ) : if isinstance ( body , dict ) : return body else : try : decoded_body = base64 . b64decode ( body ) except : return body if content_type == 'application/json' : try : return json . loads ( decoded_body ) except : pass return decoded_body
6,644
https://github.com/nuclio/nuclio-sdk-py/blob/5af9ffc19a0d96255ff430bc358be9cd7a57f424/nuclio_sdk/event.py#L102-L119
[ "def", "serverinfo", "(", "url", "=", "'http://localhost:8080/manager'", ",", "timeout", "=", "180", ")", ":", "data", "=", "_wget", "(", "'serverinfo'", ",", "{", "}", ",", "url", ",", "timeout", "=", "timeout", ")", "if", "data", "[", "'res'", "]", "is", "False", ":", "return", "{", "'error'", ":", "data", "[", "'msg'", "]", "}", "ret", "=", "{", "}", "data", "[", "'msg'", "]", ".", "pop", "(", "0", ")", "for", "line", "in", "data", "[", "'msg'", "]", ":", "tmp", "=", "line", ".", "split", "(", "':'", ")", "ret", "[", "tmp", "[", "0", "]", ".", "strip", "(", ")", "]", "=", "tmp", "[", "1", "]", ".", "strip", "(", ")", "return", "ret" ]
Converts args and deals with incongruities that argparse couldn t handle
def furtherArgsProcessing ( args ) : if isinstance ( args , str ) : unprocessed = args . strip ( ) . split ( ' ' ) if unprocessed [ 0 ] == 'cyther' : del unprocessed [ 0 ] args = parser . parse_args ( unprocessed ) . __dict__ elif isinstance ( args , argparse . Namespace ) : args = args . __dict__ elif isinstance ( args , dict ) : pass else : raise CytherError ( "Args must be a instance of str or argparse.Namespace, not '{}'" . format ( str ( type ( args ) ) ) ) if args [ 'watch' ] : args [ 'timestamp' ] = True args [ 'watch_stats' ] = { 'counter' : 0 , 'errors' : 0 , 'compiles' : 0 , 'polls' : 0 } args [ 'print_args' ] = True return args
6,645
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/commands.py#L65-L90
[ "def", "metadata", "(", "self", ")", ":", "sheet", "=", "self", ".", "result", ".", "add_sheet", "(", "\"metadata\"", ")", "self", ".", "header", "(", "sheet", ",", "\"metadata\"", ")", "n_row", "=", "1", "# row number", "for", "k", "in", "self", ".", "po", ".", "metadata", ":", "row", "=", "sheet", ".", "row", "(", "n_row", ")", "row", ".", "write", "(", "0", ",", "k", ")", "row", ".", "write", "(", "1", ",", "self", ".", "po", ".", "metadata", "[", "k", "]", ")", "n_row", "+=", "1", "sheet", ".", "flush_row_data", "(", ")" ]
Generates and error checks each file s information before the compilation actually starts
def processFiles ( args ) : to_process = [ ] for filename in args [ 'filenames' ] : file = dict ( ) if args [ 'include' ] : file [ 'include' ] = INCLUDE_STRING + '' . join ( [ '-I' + item for item in args [ 'include' ] ] ) else : file [ 'include' ] = INCLUDE_STRING file [ 'file_path' ] = getPath ( filename ) file [ 'file_base_name' ] = os . path . splitext ( os . path . basename ( file [ 'file_path' ] ) ) [ 0 ] file [ 'no_extension' ] , file [ 'extension' ] = os . path . splitext ( file [ 'file_path' ] ) if file [ 'extension' ] not in CYTHONIZABLE_FILE_EXTS : raise CytherError ( "The file '{}' is not a designated cython file" . format ( file [ 'file_path' ] ) ) base_path = os . path . dirname ( file [ 'file_path' ] ) local_build = args [ 'local' ] if not local_build : cache_name = os . path . join ( base_path , '__cythercache__' ) os . makedirs ( cache_name , exist_ok = True ) file [ 'c_name' ] = os . path . join ( cache_name , file [ 'file_base_name' ] ) + '.c' else : file [ 'c_name' ] = file [ 'no_extension' ] + '.c' file [ 'object_file_name' ] = os . path . splitext ( file [ 'c_name' ] ) [ 0 ] + '.o' output_name = args [ 'output_name' ] if args [ 'watch' ] : file [ 'output_name' ] = file [ 'no_extension' ] + DEFAULT_OUTPUT_EXTENSION elif output_name : if os . path . exists ( output_name ) and os . path . isfile ( output_name ) : file [ 'output_name' ] = output_name else : dirname = os . path . dirname ( output_name ) if not dirname : dirname = os . getcwd ( ) if os . path . exists ( dirname ) : file [ 'output_name' ] = output_name else : raise CytherError ( 'The directory specified to write' 'the output file in does not exist' ) else : file [ 'output_name' ] = file [ 'no_extension' ] + DEFAULT_OUTPUT_EXTENSION file [ 'stamp_if_error' ] = 0 to_process . append ( file ) return to_process
6,646
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/commands.py#L93-L147
[ "def", "merge_hooks", "(", "request_hooks", ",", "session_hooks", ",", "dict_class", "=", "OrderedDict", ")", ":", "if", "session_hooks", "is", "None", "or", "session_hooks", ".", "get", "(", "'response'", ")", "==", "[", "]", ":", "return", "request_hooks", "if", "request_hooks", "is", "None", "or", "request_hooks", ".", "get", "(", "'response'", ")", "==", "[", "]", ":", "return", "session_hooks", "return", "merge_setting", "(", "request_hooks", ",", "session_hooks", ",", "dict_class", ")" ]
Given a high level preset it will construct the basic args to pass over . ninja beast minimal swift
def makeCommands ( file ) : commands = [ [ 'cython' , '-a' , '-p' , '-o' , file [ 'c_name' ] , file [ 'file_path' ] ] , [ 'gcc' , '-DNDEBUG' , '-g' , '-fwrapv' , '-O3' , '-Wall' , '-Wextra' , '-pthread' , '-fPIC' , '-c' , file [ 'include' ] , '-o' , file [ 'object_file_name' ] , file [ 'c_name' ] ] , [ 'gcc' , '-g' , '-Wall' , '-Wextra' , '-pthread' , '-shared' , RUNTIME_STRING , '-o' , file [ 'output_name' ] , file [ 'object_file_name' ] , L_OPTION ] ] return commands
6,647
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/commands.py#L206-L220
[ "def", "get_monitors", "(", ")", ":", "count_value", "=", "ctypes", ".", "c_int", "(", "0", ")", "count", "=", "ctypes", ".", "pointer", "(", "count_value", ")", "result", "=", "_glfw", ".", "glfwGetMonitors", "(", "count", ")", "monitors", "=", "[", "result", "[", "i", "]", "for", "i", "in", "range", "(", "count_value", ".", "value", ")", "]", "return", "monitors" ]
Generic OpenET Collection
def collection ( et_model , variable , collections , start_date , end_date , t_interval , geometry , * * kwargs ) : # Load the ET model if et_model . lower ( ) == 'ndvi' : # # DEADBEEF - Manually adding OpenET Model to system path # # This will eventually be handled by import openet modules # import os # model_path = os.path.dirname(os.path.dirname(os.path.dirname( # os.path.abspath(os.path.realpath(__file__))))) # print(model_path) # sys.path.insert(0, os.path.join(model_path, 'openet-ndvi-test')) # print(sys.path) try : import openet . ndvi as model except ModuleNotFoundError : print ( '\nThe ET model {} could not be imported' . format ( et_model ) + '\nPlease ensure that the model has been installed' ) return False except Exception as e : print ( 'Unhandled Exception: {}' . format ( e ) ) raise elif et_model . lower ( ) == 'ssebop' : # # DEADBEEF - Manually adding OpenET Models to system path # # This will eventually be handled by import openet modules # import os # model_path = os.path.dirname(os.path.dirname(os.path.dirname( # os.path.abspath(os.path.realpath(__file__))))) # print(model_path) # sys.path.insert(0, os.path.join(model_path, 'openet-ssebop-test')) try : import openet . ssebop as model except ModuleNotFoundError : print ( '\nThe ET model {} could not be imported' . format ( et_model ) + '\nPlease ensure that the model has been installed' ) return False except Exception as e : print ( 'Unhandled Exception: {}' . format ( e ) ) raise else : # CGM - This could just be a value error exception raise ValueError ( 'unsupported et_model type' ) variable_coll = model . collection ( variable , collections , start_date , end_date , t_interval , geometry , * * kwargs ) return variable_coll
6,648
https://github.com/Open-ET/openet-core-beta/blob/f2b81ccf87bf7e7fe1b9f3dd1d4081d0ec7852db/openet/core/api.py#L16-L117
[ "def", "_restart_session", "(", "self", ",", "session", ")", ":", "# remove old session key, if socket is None, that means the", "# session was closed by user and there is no need to restart.", "if", "session", ".", "socket", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"Attempting restart session for Monitor Id %s.\"", "%", "session", ".", "monitor_id", ")", "del", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "session", ".", "stop", "(", ")", "session", ".", "start", "(", ")", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "=", "session" ]
Iterator that returns all the terminal objects
def get_terminals_as_list ( self ) : terminalList = [ ] for t_node in self . __get_t_nodes ( ) : terminalList . append ( Cterminal ( t_node ) ) return terminalList
6,649
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/constituency_data.py#L308-L317
[ "def", "verify", "(", "dataset", ",", "publication_date", ",", "source", ",", "refernce_url", ")", ":", "config", "=", "ApiConfig", "(", ")", "client", "=", "ApiClient", "(", "config", ".", "host", ",", "config", ".", "app_id", ",", "config", ".", "app_secret", ")", "client", ".", "check_correct_host", "(", ")", "client", ".", "verify", "(", "dataset", ",", "publication_date", ",", "source", ",", "refernce_url", ")" ]
Iterator that returns all the edge objects
def get_edges_as_list ( self ) : my_edges = [ ] for edge_node in self . __get_edge_nodes ( ) : my_edges . append ( Cedge ( edge_node ) ) return my_edges
6,650
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/constituency_data.py#L374-L383
[ "def", "insert_recording", "(", "hw", ")", ":", "mysql", "=", "utils", ".", "get_mysql_cfg", "(", ")", "connection", "=", "pymysql", ".", "connect", "(", "host", "=", "mysql", "[", "'host'", "]", ",", "user", "=", "mysql", "[", "'user'", "]", ",", "passwd", "=", "mysql", "[", "'passwd'", "]", ",", "db", "=", "mysql", "[", "'db'", "]", ",", "charset", "=", "'utf8mb4'", ",", "cursorclass", "=", "pymysql", ".", "cursors", ".", "DictCursor", ")", "try", ":", "cursor", "=", "connection", ".", "cursor", "(", ")", "sql", "=", "(", "\"INSERT INTO `wm_raw_draw_data` (\"", "\"`user_id`, \"", "\"`data`, \"", "\"`md5data`, \"", "\"`creation_date`, \"", "\"`device_type`, \"", "\"`accepted_formula_id`, \"", "\"`secret`, \"", "\"`ip`, \"", "\"`segmentation`, \"", "\"`internal_id`, \"", "\"`description` \"", "\") VALUES (%s, %s, MD5(data), \"", "\"%s, %s, %s, %s, %s, %s, %s, %s);\"", ")", "data", "=", "(", "hw", ".", "user_id", ",", "hw", ".", "raw_data_json", ",", "getattr", "(", "hw", ",", "'creation_date'", ",", "None", ")", ",", "getattr", "(", "hw", ",", "'device_type'", ",", "''", ")", ",", "getattr", "(", "hw", ",", "'formula_id'", ",", "None", ")", ",", "getattr", "(", "hw", ",", "'secret'", ",", "''", ")", ",", "getattr", "(", "hw", ",", "'ip'", ",", "None", ")", ",", "str", "(", "getattr", "(", "hw", ",", "'segmentation'", ",", "''", ")", ")", ",", "getattr", "(", "hw", ",", "'internal_id'", ",", "''", ")", ",", "getattr", "(", "hw", ",", "'description'", ",", "''", ")", ")", "cursor", ".", "execute", "(", "sql", ",", "data", ")", "connection", ".", "commit", "(", ")", "for", "symbol_id", ",", "strokes", "in", "zip", "(", "hw", ".", "symbol_stream", ",", "hw", ".", "segmentation", ")", ":", "insert_symbol_mapping", "(", "cursor", ".", "lastrowid", ",", "symbol_id", ",", "hw", ".", "user_id", ",", "strokes", ")", "logging", ".", "info", "(", "\"Insert raw data.\"", ")", "except", "pymysql", ".", "err", ".", "IntegrityError", "as", "e", ":", "print", "(", "\"Error: {} 
(can probably be ignored)\"", ".", "format", "(", "e", ")", ")" ]
Prepare binar segmentation based on input segmentation and labels .
def select_labels ( self , labels = None ) : self . _resize_if_required ( ) segmentation = self . _select_labels ( self . resized_segmentation , labels ) # logger.debug("select labels in show_segmentation {} sum {}".format(labels, np.sum(segmentation))) self . resized_binar_segmentation = segmentation
6,651
https://github.com/mjirik/imtools/blob/eb29fa59df0e0684d8334eb3bc5ef36ea46d1d3a/imtools/show_segmentation.py#L126-L135
[ "def", "save", "(", "self", ",", "obj", ")", ":", "cur", "=", "self", ".", "_conn", "(", ")", ".", "cursor", "(", ")", "tabname", "=", "obj", ".", "__class__", ".", "get_table_name", "(", ")", "index_names", "=", "obj", ".", "__class__", ".", "index_names", "(", ")", "or", "[", "]", "col_names", "=", "[", "'id'", ",", "'value'", "]", "+", "index_names", "value_holders", "=", "[", "'%s'", "]", "*", "len", "(", "col_names", ")", "updates", "=", "[", "'%s = EXCLUDED.%s'", "%", "(", "cn", ",", "cn", ")", "for", "cn", "in", "col_names", "[", "1", ":", "]", "]", "if", "not", "obj", ".", "id", ":", "id", "=", "uuid", "(", ")", "obj", ".", "id", "=", "id", "query", "=", "'insert into {0} ({1}) values ({2}) on conflict(id) do update set {3};'", ".", "format", "(", "tabname", ",", "','", ".", "join", "(", "col_names", ")", ",", "','", ".", "join", "(", "value_holders", ")", ",", "','", ".", "join", "(", "updates", ")", ",", ")", "values", "=", "[", "obj", ".", "id", ",", "obj", ".", "to_data", "(", ")", "]", "index_vals", "=", "obj", ".", "indexes", "(", ")", "or", "{", "}", "values", "+=", "[", "index_vals", ".", "get", "(", "name", ",", "'NULL'", ")", "for", "name", "in", "index_names", "]", "with", "self", ".", "_conn", "(", ")", "as", "conn", ":", "with", "conn", ".", "cursor", "(", ")", "as", "cur", ":", "cur", ".", "execute", "(", "query", ",", "tuple", "(", "values", ")", ")" ]
Get selection of labels from input segmentation
def _select_labels ( self , segmentation , labels = None ) : logger . debug ( "select_labels() started with labels={}" . format ( labels ) ) if self . slab is not None and labels is not None : segmentation_out = select_labels ( segmentation , labels , slab = self . slab ) else : logger . warning ( "Nothing found for labels " + str ( labels ) ) un = np . unique ( segmentation ) if len ( un ) < 2 : logger . error ( "Just one label found in input segmenation" ) segmentation_out = ( segmentation > un [ 0 ] ) . astype ( segmentation . dtype ) return segmentation_out
6,652
https://github.com/mjirik/imtools/blob/eb29fa59df0e0684d8334eb3bc5ef36ea46d1d3a/imtools/show_segmentation.py#L137-L154
[ "def", "waitForEvent", "(", "self", ",", "event_name", ",", "predicate", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", ":", "deadline", "=", "time", ".", "time", "(", ")", "+", "timeout", "while", "time", ".", "time", "(", ")", "<=", "deadline", ":", "# Calculate the max timeout for the next event rpc call.", "rpc_timeout", "=", "deadline", "-", "time", ".", "time", "(", ")", "if", "rpc_timeout", "<", "0", ":", "break", "# A single RPC call cannot exceed MAX_TIMEOUT.", "rpc_timeout", "=", "min", "(", "rpc_timeout", ",", "MAX_TIMEOUT", ")", "try", ":", "event", "=", "self", ".", "waitAndGet", "(", "event_name", ",", "rpc_timeout", ")", "except", "TimeoutError", ":", "# Ignoring TimeoutError since we need to throw one with a more", "# specific message.", "break", "if", "predicate", "(", "event", ")", ":", "return", "event", "raise", "TimeoutError", "(", "self", ".", "_ad", ",", "'Timed out after %ss waiting for an \"%s\" event that satisfies the '", "'predicate \"%s\".'", "%", "(", "timeout", ",", "event_name", ",", "predicate", ".", "__name__", ")", ")" ]
Get template by providing name ID or other unique key .
def Get ( self , key ) : for template in self . templates : if template . id == key : return ( template )
6,653
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/template.py#L25-L33
[ "def", "apply", "(", "self", ")", ":", "self", ".", "read_group_info", "(", ")", "if", "self", ".", "tabs", ".", "count", "(", ")", "==", "0", ":", "# disactivate buttons", "self", ".", "button_color", ".", "setEnabled", "(", "False", ")", "self", ".", "button_del", ".", "setEnabled", "(", "False", ")", "self", ".", "button_apply", ".", "setEnabled", "(", "False", ")", "else", ":", "# activate buttons", "self", ".", "button_color", ".", "setEnabled", "(", "True", ")", "self", ".", "button_del", ".", "setEnabled", "(", "True", ")", "self", ".", "button_apply", ".", "setEnabled", "(", "True", ")", "if", "self", ".", "groups", ":", "self", ".", "parent", ".", "overview", ".", "update_position", "(", ")", "self", ".", "parent", ".", "spectrum", ".", "update", "(", ")", "self", ".", "parent", ".", "notes", ".", "enable_events", "(", ")", "else", ":", "self", ".", "parent", ".", "traces", ".", "reset", "(", ")", "self", ".", "parent", ".", "spectrum", ".", "reset", "(", ")", "self", ".", "parent", ".", "notes", ".", "enable_events", "(", ")" ]
Search template list by providing partial name ID or other key .
def Search ( self , key ) : results = [ ] for template in self . templates : if template . id . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( template ) elif template . name . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( template ) return ( results )
6,654
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/template.py#L36-L46
[ "def", "get_requests_session", "(", ")", ":", "session", "=", "requests", ".", "sessions", ".", "Session", "(", ")", "session", ".", "mount", "(", "'http://'", ",", "HTTPAdapter", "(", "pool_connections", "=", "25", ",", "pool_maxsize", "=", "25", ",", "pool_block", "=", "True", ")", ")", "session", ".", "mount", "(", "'https://'", ",", "HTTPAdapter", "(", "pool_connections", "=", "25", ",", "pool_maxsize", "=", "25", ",", "pool_block", "=", "True", ")", ")", "return", "session" ]
Returns Zulu TS from unix time seconds .
def SecondsToZuluTS ( secs = None ) : if not secs : secs = int ( time . time ( ) ) return ( datetime . utcfromtimestamp ( secs ) . strftime ( "%Y-%m-%dT%H:%M:%SZ" ) )
6,655
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/time_utils.py#L16-L24
[ "def", "delete", "(", "self", ",", "file_id", ")", ":", "res", "=", "self", ".", "_files", ".", "delete_one", "(", "{", "\"_id\"", ":", "file_id", "}", ")", "self", ".", "_chunks", ".", "delete_many", "(", "{", "\"files_id\"", ":", "file_id", "}", ")", "if", "not", "res", ".", "deleted_count", ":", "raise", "NoFile", "(", "\"no file could be deleted because none matched %s\"", "%", "file_id", ")" ]
Main function that is called when DistanceClassifier is run on the command line
def main ( ) : parser = argparse . ArgumentParser ( description = 'DistanceClassifier for classification based on distance measure in feature space.' , add_help = False ) parser . add_argument ( 'INPUT_FILE' , type = str , help = 'Data file to perform DistanceClassifier on; ensure that the class label column is labeled as "class".' ) parser . add_argument ( '-h' , '--help' , action = 'help' , help = 'Show this help message and exit.' ) parser . add_argument ( '-is' , action = 'store' , dest = 'INPUT_SEPARATOR' , default = '\t' , type = str , help = 'Character used to separate columns in the input file.' ) parser . add_argument ( '-d' , action = 'store' , dest = 'D' , default = 'mahalanobis' , choices = [ 'mahalanobis' , 'euclidean' ] , type = str , help = 'Distance metric to use.' ) parser . add_argument ( '-v' , action = 'store' , dest = 'VERBOSITY' , default = 1 , choices = [ 0 , 1 , 2 ] , type = int , help = 'How much information DistanceClassifier communicates while it is running: 0 = none, 1 = minimal, 2 = all.' ) parser . add_argument ( '-s' , action = 'store' , dest = 'RANDOM_STATE' , default = 0 , type = int , help = 'Random state for train/test split.' ) parser . add_argument ( '--version' , action = 'version' , version = 'DistanceClassifier {version}' . format ( version = __version__ ) , help = 'Show DistanceClassifier\'s version number and exit.' ) args = parser . parse_args ( ) if args . VERBOSITY >= 2 : print ( '\nDistanceClassifier settings:' ) for arg in sorted ( args . __dict__ ) : print ( '{}\t=\t{}' . format ( arg , args . __dict__ [ arg ] ) ) print ( '' ) input_data = pd . read_csv ( args . INPUT_FILE , sep = args . INPUT_SEPARATOR ) if 'Class' in input_data . columns . values : input_data . rename ( columns = { 'Label' : 'label' } , inplace = True ) RANDOM_STATE = args . RANDOM_STATE if args . 
RANDOM_STATE > 0 else None # # training_indices, testing_indices = train_test_split(input_data.index, # stratify=input_data['label'].values, # train_size=0.75, # test_size=0.25, # random_state=RANDOM_STATE) # # training_features = input_data.loc[training_indices].drop('label', axis=1).values # training_classes = input_data.loc[training_indices, 'label'].values # # testing_features = input_data.loc[testing_indices].drop('label', axis=1).values # testing_classes = input_data.loc[testing_indices, 'label'].values # Run and evaluate DistanceClassifier on the training and testing data dc = DistanceClassifier ( d = args . D ) # dc.fit(training_features, training_classes) dc . fit ( input_data . drop ( 'label' , axis = 1 ) . values , input_data [ 'label' ] . values ) print ( dc . score ( input_data . drop ( 'label' , axis = 1 ) . values , input_data [ 'label' ] . values ) )
6,656
https://github.com/lacava/DistanceClassifier/blob/cbb8a38a82b453c5821d2a2c3328b581f62e47bc/DistanceClassifier/DistanceClassifier.py#L172-L227
[ "def", "duplicate_sheet", "(", "self", ",", "source_sheet_id", ",", "insert_sheet_index", "=", "None", ",", "new_sheet_id", "=", "None", ",", "new_sheet_name", "=", "None", ")", ":", "body", "=", "{", "'requests'", ":", "[", "{", "'duplicateSheet'", ":", "{", "'sourceSheetId'", ":", "source_sheet_id", ",", "'insertSheetIndex'", ":", "insert_sheet_index", ",", "'newSheetId'", ":", "new_sheet_id", ",", "'newSheetName'", ":", "new_sheet_name", "}", "}", "]", "}", "data", "=", "self", ".", "batch_update", "(", "body", ")", "properties", "=", "data", "[", "'replies'", "]", "[", "0", "]", "[", "'duplicateSheet'", "]", "[", "'properties'", "]", "worksheet", "=", "Worksheet", "(", "self", ",", "properties", ")", "return", "worksheet" ]
Constructs the DistanceClassifier from the provided training data
def fit ( self , features , classes ) : # class labels classes = self . le . fit_transform ( classes ) # group the data by class label X = [ ] self . mu = [ ] self . Z = [ ] for i in np . unique ( classes ) : X . append ( features [ classes == i ] ) self . mu . append ( np . mean ( X [ i ] , axis = 0 ) ) if self . d == 'mahalanobis' : self . Z . append ( np . cov ( X [ i ] . transpose ( ) ) ) return self
6,657
https://github.com/lacava/DistanceClassifier/blob/cbb8a38a82b453c5821d2a2c3328b581f62e47bc/DistanceClassifier/DistanceClassifier.py#L56-L84
[ "def", "toner_status", "(", "self", ",", "filter_supported", ":", "bool", "=", "True", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "toner_status", "=", "{", "}", "for", "color", "in", "self", ".", "COLOR_NAMES", ":", "try", ":", "toner_stat", "=", "self", ".", "data", ".", "get", "(", "'{}_{}'", ".", "format", "(", "SyncThru", ".", "TONER", ",", "color", ")", ",", "{", "}", ")", "if", "filter_supported", "and", "toner_stat", ".", "get", "(", "'opt'", ",", "0", ")", "==", "0", ":", "continue", "else", ":", "toner_status", "[", "color", "]", "=", "toner_stat", "except", "(", "KeyError", ",", "AttributeError", ")", ":", "toner_status", "[", "color", "]", "=", "{", "}", "return", "toner_status" ]
Predict class outputs for an unlabelled feature set
def predict ( self , features ) : # get distance of features to class clusters distances = [ self . _distance ( x ) for x in features ] # assign class label belonging to smallest distance class_predict = [ np . argmin ( d ) for d in distances ] return self . le . inverse_transform ( class_predict )
6,658
https://github.com/lacava/DistanceClassifier/blob/cbb8a38a82b453c5821d2a2c3328b581f62e47bc/DistanceClassifier/DistanceClassifier.py#L86-L95
[ "def", "is_registration_possible", "(", "self", ",", "user_info", ")", ":", "return", "self", ".", "get_accessibility", "(", ")", ".", "is_open", "(", ")", "and", "self", ".", "_registration", ".", "is_open", "(", ")", "and", "self", ".", "is_user_accepted_by_access_control", "(", "user_info", ")" ]
returns distance measures for features
def _distance ( self , x ) : distance = np . empty ( [ len ( self . mu ) ] ) for i in np . arange ( len ( self . mu ) ) : if self . d == 'mahalanobis' and self . is_invertible ( self . Z [ i ] ) : distance [ i ] = ( x - self . mu [ i ] ) . dot ( np . linalg . inv ( self . Z [ i ] ) ) . dot ( ( x - self . mu [ i ] ) . transpose ( ) ) else : distance [ i ] = ( x - self . mu [ i ] ) . dot ( ( x - self . mu [ i ] ) . transpose ( ) ) return distance
6,659
https://github.com/lacava/DistanceClassifier/blob/cbb8a38a82b453c5821d2a2c3328b581f62e47bc/DistanceClassifier/DistanceClassifier.py#L97-L107
[ "def", "error403", "(", "error", ")", ":", "tb", "=", "error", ".", "traceback", "if", "isinstance", "(", "tb", ",", "dict", ")", "and", "\"name\"", "in", "tb", "and", "\"uuid\"", "in", "tb", ":", "return", "SimpleTemplate", "(", "PRIVATE_ACCESS_MSG", ")", ".", "render", "(", "name", "=", "error", ".", "traceback", "[", "\"name\"", "]", ",", "uuid", "=", "error", ".", "traceback", "[", "\"uuid\"", "]", ")", "return", "\"Access denied!\"" ]
Estimates the accuracy of the predictions from the constructed feature
def score ( self , features , classes , scoring_function = accuracy_score , * * scoring_function_kwargs ) : if not self . mu : raise ValueError ( 'The DistanceClassifier model must be fit before score() can be called' ) return scoring_function ( classes , self . predict ( features ) , * * scoring_function_kwargs )
6,660
https://github.com/lacava/DistanceClassifier/blob/cbb8a38a82b453c5821d2a2c3328b581f62e47bc/DistanceClassifier/DistanceClassifier.py#L126-L145
[ "def", "to_json", "(", "self", ")", ":", "return", "{", "'dry_bulb_max'", ":", "self", ".", "dry_bulb_max", ",", "'dry_bulb_range'", ":", "self", ".", "dry_bulb_range", ",", "'modifier_type'", ":", "self", ".", "modifier_type", ",", "'modifier_schedule'", ":", "self", ".", "modifier_schedule", "}" ]
checks if Z is invertible
def is_invertible ( self , X ) : if len ( X . shape ) == 2 : return X . shape [ 0 ] == X . shape [ 1 ] and np . linalg . matrix_rank ( X ) == X . shape [ 0 ] else : return False
6,661
https://github.com/lacava/DistanceClassifier/blob/cbb8a38a82b453c5821d2a2c3328b581f62e47bc/DistanceClassifier/DistanceClassifier.py#L165-L170
[ "def", "_process_message", "(", "self", ",", "msg", ")", ":", "self", ".", "_logger", ".", "info", "(", "'Processing Message(topic={}, partition={}, offset={}) ...'", ".", "format", "(", "msg", ".", "topic", ",", "msg", ".", "partition", ",", "msg", ".", "offset", ")", ")", "try", ":", "job", "=", "self", ".", "_deserializer", "(", "msg", ".", "value", ")", "job_repr", "=", "get_call_repr", "(", "job", ".", "func", ",", "*", "job", ".", "args", ",", "*", "*", "job", ".", "kwargs", ")", "except", "Exception", "as", "err", ":", "self", ".", "_logger", ".", "exception", "(", "'Job was invalid: {}'", ".", "format", "(", "err", ")", ")", "self", ".", "_execute_callback", "(", "'invalid'", ",", "msg", ",", "None", ",", "None", ",", "None", ",", "None", ")", "else", ":", "self", ".", "_logger", ".", "info", "(", "'Executing job {}: {}'", ".", "format", "(", "job", ".", "id", ",", "job_repr", ")", ")", "if", "job", ".", "timeout", ":", "timer", "=", "threading", ".", "Timer", "(", "job", ".", "timeout", ",", "_thread", ".", "interrupt_main", ")", "timer", ".", "start", "(", ")", "else", ":", "timer", "=", "None", "try", ":", "res", "=", "job", ".", "func", "(", "*", "job", ".", "args", ",", "*", "*", "job", ".", "kwargs", ")", "except", "KeyboardInterrupt", ":", "self", ".", "_logger", ".", "error", "(", "'Job {} timed out or was interrupted'", ".", "format", "(", "job", ".", "id", ")", ")", "self", ".", "_execute_callback", "(", "'timeout'", ",", "msg", ",", "job", ",", "None", ",", "None", ",", "None", ")", "except", "Exception", "as", "err", ":", "self", ".", "_logger", ".", "exception", "(", "'Job {} raised an exception:'", ".", "format", "(", "job", ".", "id", ")", ")", "tb", "=", "traceback", ".", "format_exc", "(", ")", "self", ".", "_execute_callback", "(", "'failure'", ",", "msg", ",", "job", ",", "None", ",", "err", ",", "tb", ")", "else", ":", "self", ".", "_logger", ".", "info", "(", "'Job {} returned: {}'", ".", "format", "(", "job", ".", "id", ",", "res", 
")", ")", "self", ".", "_execute_callback", "(", "'success'", ",", "msg", ",", "job", ",", "res", ",", "None", ",", "None", ")", "finally", ":", "if", "timer", "is", "not", "None", ":", "timer", ".", "cancel", "(", ")" ]
Returns the span object of the term
def get_span_ids ( self ) : node_span = self . node . find ( 'span' ) if node_span is not None : mySpan = Cspan ( node_span ) span_ids = mySpan . get_span_ids ( ) return span_ids else : return [ ]
6,662
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/term_data.py#L177-L189
[ "def", "load_crmod_config", "(", "self", ",", "filename", ")", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "fid", ":", "nr_of_configs", "=", "int", "(", "fid", ".", "readline", "(", ")", ".", "strip", "(", ")", ")", "configs", "=", "np", ".", "loadtxt", "(", "fid", ")", "print", "(", "'loaded configs:'", ",", "configs", ".", "shape", ")", "if", "nr_of_configs", "!=", "configs", ".", "shape", "[", "0", "]", ":", "raise", "Exception", "(", "'indicated number of measurements does not equal '", "+", "'to actual number of measurements'", ")", "ABMN", "=", "self", ".", "_crmod_to_abmn", "(", "configs", "[", ":", ",", "0", ":", "2", "]", ")", "self", ".", "configs", "=", "ABMN" ]
Sets the span for the term from list of ids
def set_span_from_ids ( self , span_list ) : this_span = Cspan ( ) this_span . create_from_ids ( span_list ) self . node . append ( this_span . get_node ( ) )
6,663
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/term_data.py#L191-L199
[ "def", "_validate", "(", "self", ")", ":", "for", "key", "in", "self", ":", "if", "key", "not", "in", "DEFAULTS", ":", "raise", "exceptions", ".", "ConfigurationException", "(", "'Unknown configuration key \"{}\"! Valid configuration keys are'", "\" {}\"", ".", "format", "(", "key", ",", "list", "(", "DEFAULTS", ".", "keys", "(", ")", ")", ")", ")", "validate_queues", "(", "self", "[", "\"queues\"", "]", ")", "validate_bindings", "(", "self", "[", "\"bindings\"", "]", ")", "validate_client_properties", "(", "self", "[", "\"client_properties\"", "]", ")" ]
Returns the term object for the supplied identifier
def get_term ( self , term_id ) : if term_id in self . idx : return Cterm ( self . idx [ term_id ] , self . type ) else : return None
6,664
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/term_data.py#L327-L336
[ "def", "_recv", "(", "self", ")", ":", "recvd", "=", "[", "]", "self", ".", "_lock", ".", "acquire", "(", ")", "if", "not", "self", ".", "_can_send_recv", "(", ")", ":", "log", ".", "warning", "(", "'%s cannot recv: socket not connected'", ",", "self", ")", "self", ".", "_lock", ".", "release", "(", ")", "return", "(", ")", "while", "len", "(", "recvd", ")", "<", "self", ".", "config", "[", "'sock_chunk_buffer_count'", "]", ":", "try", ":", "data", "=", "self", ".", "_sock", ".", "recv", "(", "self", ".", "config", "[", "'sock_chunk_bytes'", "]", ")", "# We expect socket.recv to raise an exception if there are no", "# bytes available to read from the socket in non-blocking mode.", "# but if the socket is disconnected, we will get empty data", "# without an exception raised", "if", "not", "data", ":", "log", ".", "error", "(", "'%s: socket disconnected'", ",", "self", ")", "self", ".", "_lock", ".", "release", "(", ")", "self", ".", "close", "(", "error", "=", "Errors", ".", "KafkaConnectionError", "(", "'socket disconnected'", ")", ")", "return", "[", "]", "else", ":", "recvd", ".", "append", "(", "data", ")", "except", "SSLWantReadError", ":", "break", "except", "ConnectionError", "as", "e", ":", "if", "six", ".", "PY2", "and", "e", ".", "errno", "==", "errno", ".", "EWOULDBLOCK", ":", "break", "log", ".", "exception", "(", "'%s: Error receiving network data'", "' closing socket'", ",", "self", ")", "self", ".", "_lock", ".", "release", "(", ")", "self", ".", "close", "(", "error", "=", "Errors", ".", "KafkaConnectionError", "(", "e", ")", ")", "return", "[", "]", "except", "BlockingIOError", ":", "if", "six", ".", "PY3", ":", "break", "self", ".", "_lock", ".", "release", "(", ")", "raise", "recvd_data", "=", "b''", ".", "join", "(", "recvd", ")", "if", "self", ".", "_sensors", ":", "self", ".", "_sensors", ".", "bytes_received", ".", "record", "(", "len", "(", "recvd_data", ")", ")", "try", ":", "responses", "=", "self", ".", "_protocol", ".", "receive_bytes", 
"(", "recvd_data", ")", "except", "Errors", ".", "KafkaProtocolError", "as", "e", ":", "self", ".", "_lock", ".", "release", "(", ")", "self", ".", "close", "(", "e", ")", "return", "[", "]", "else", ":", "self", ".", "_lock", ".", "release", "(", ")", "return", "responses" ]
Adds a term object to the layer
def add_term ( self , term_obj ) : if term_obj . get_id ( ) in self . idx : raise ValueError ( "Term with id {} already exists!" . format ( term_obj . get_id ( ) ) ) self . node . append ( term_obj . get_node ( ) ) self . idx [ term_obj . get_id ( ) ] = term_obj
6,665
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/term_data.py#L338-L348
[ "def", "launcher", "(", ")", ":", "parser", "=", "OptionParser", "(", ")", "parser", ".", "add_option", "(", "'-f'", ",", "'--file'", ",", "dest", "=", "'filename'", ",", "default", "=", "'agents.csv'", ",", "help", "=", "'snmposter configuration file'", ")", "options", ",", "args", "=", "parser", ".", "parse_args", "(", ")", "factory", "=", "SNMPosterFactory", "(", ")", "snmpd_status", "=", "subprocess", ".", "Popen", "(", "[", "\"service\"", ",", "\"snmpd\"", ",", "\"status\"", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", ".", "communicate", "(", ")", "[", "0", "]", "if", "\"is running\"", "in", "snmpd_status", ":", "message", "=", "\"snmd service is running. Please stop it and try again.\"", "print", ">>", "sys", ".", "stderr", ",", "message", "sys", ".", "exit", "(", "1", ")", "try", ":", "factory", ".", "configure", "(", "options", ".", "filename", ")", "except", "IOError", ":", "print", ">>", "sys", ".", "stderr", ",", "\"Error opening %s.\"", "%", "options", ".", "filename", "sys", ".", "exit", "(", "1", ")", "factory", ".", "start", "(", ")" ]
Adds an external reference for the given term
def add_external_reference ( self , term_id , external_ref ) : if term_id in self . idx : term_obj = Cterm ( self . idx [ term_id ] , self . type ) term_obj . add_external_reference ( external_ref ) else : print ( '{term_id} not in self.idx' . format ( * * locals ( ) ) )
6,666
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/term_data.py#L350-L362
[ "def", "_mk_tree", "(", "runas", "=", "'root'", ")", ":", "basedir", "=", "tempfile", ".", "mkdtemp", "(", ")", "paths", "=", "[", "'BUILD'", ",", "'RPMS'", ",", "'SOURCES'", ",", "'SPECS'", ",", "'SRPMS'", "]", "for", "path", "in", "paths", ":", "full", "=", "os", ".", "path", ".", "join", "(", "basedir", ",", "path", ")", "__salt__", "[", "'file.makedirs_perms'", "]", "(", "name", "=", "full", ",", "user", "=", "runas", ",", "group", "=", "'mock'", ")", "return", "basedir" ]
Create widget with segmentation labels information used to select labels .
def init_slab ( self , slab = None , segmentation = None , voxelsize_mm = None , show_ok_button = False ) : self . segmentation = segmentation self . voxelsize_mm = voxelsize_mm from . import show_segmentation self . slab = show_segmentation . create_slab_from_segmentation ( self . segmentation , slab = slab ) if show_ok_button : ok_button = QPushButton ( "Ok" ) ok_button . clicked . connect ( self . _action_ok_button ) self . superMainScrollLayout . addWidget ( ok_button )
6,667
https://github.com/mjirik/imtools/blob/eb29fa59df0e0684d8334eb3bc5ef36ea46d1d3a/imtools/select_label_qt.py#L47-L67
[ "def", "_timeout_from_retry_config", "(", "retry_params", ")", ":", "return", "timeout", ".", "ExponentialTimeout", "(", "initial", "=", "(", "retry_params", "[", "\"initial_rpc_timeout_millis\"", "]", "/", "_MILLIS_PER_SECOND", ")", ",", "maximum", "=", "(", "retry_params", "[", "\"max_rpc_timeout_millis\"", "]", "/", "_MILLIS_PER_SECOND", ")", ",", "multiplier", "=", "retry_params", "[", "\"rpc_timeout_multiplier\"", "]", ",", "deadline", "=", "(", "retry_params", "[", "\"total_timeout_millis\"", "]", "/", "_MILLIS_PER_SECOND", ")", ",", ")" ]
Gets a deep list of all Servers for a given Hardware Group and its sub groups or all Servers for a given location .
def GetServers ( location , group = None , alias = None , name_groups = False ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) payload = { 'AccountAlias' : alias } if group : payload [ 'HardwareGroupUUID' ] = clc . v1 . Group . GetGroupUUID ( group , alias , location ) else : payload [ 'Location' ] = location try : r = clc . v1 . API . Call ( 'post' , 'Server/GetAllServers' , payload ) if name_groups : r [ 'Servers' ] = clc . v1 . Group . NameGroups ( r [ 'Servers' ] , 'HardwareGroupUUID' ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'Servers' ] ) except Exception as e : if str ( e ) == "Hardware does not exist for location" : return ( [ ] ) else : raise
6,668
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/server.py#L37-L57
[ "def", "user_agent", "(", "self", ",", "text", ",", "*", "*", "kwargs", ")", ":", "indicator_obj", "=", "UserAgent", "(", "text", ",", "*", "*", "kwargs", ")", "return", "self", ".", "_indicator", "(", "indicator_obj", ")" ]
Gets a deep list of all Servers in all groups and datacenters .
def GetAllServers ( alias = None , name_groups = False ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) servers = [ ] clc . v1 . Account . GetLocations ( ) for location in clc . LOCATIONS : try : r = clc . v1 . API . Call ( 'post' , 'Server/GetAllServers' , { 'AccountAlias' : alias , 'Location' : location } , hide_errors = [ 5 , ] ) if name_groups : r [ 'Servers' ] = clc . v1 . Group . NameGroups ( r [ 'Servers' ] , 'HardwareGroupUUID' ) if int ( r [ 'StatusCode' ] ) == 0 : servers += r [ 'Servers' ] except : pass return ( servers )
6,669
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/server.py#L61-L76
[ "def", "put_annotation", "(", "self", ",", "key", ",", "value", ")", ":", "self", ".", "_check_ended", "(", ")", "if", "not", "isinstance", "(", "key", ",", "string_types", ")", ":", "log", ".", "warning", "(", "\"ignoring non string type annotation key with type %s.\"", ",", "type", "(", "key", ")", ")", "return", "if", "not", "isinstance", "(", "value", ",", "annotation_value_types", ")", ":", "log", ".", "warning", "(", "\"ignoring unsupported annotation value type %s.\"", ",", "type", "(", "value", ")", ")", "return", "if", "any", "(", "character", "not", "in", "_valid_annotation_key_characters", "for", "character", "in", "key", ")", ":", "log", ".", "warning", "(", "\"ignoring annnotation with unsupported characters in key: '%s'.\"", ",", "key", ")", "return", "self", ".", "annotations", "[", "key", "]", "=", "value" ]
Given a template name return the unique OperatingSystem ID .
def GetTemplateID ( alias , location , name ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) r = Server . GetTemplates ( alias , location ) for row in r : if row [ 'Name' ] . lower ( ) == name . lower ( ) : return ( row [ 'OperatingSystem' ] ) else : if clc . args : clc . v1 . output . Status ( "ERROR" , 3 , "Template %s not found in account %s datacenter %s" % ( name , alias , location ) ) raise Exception ( "Template not found" )
6,670
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/server.py#L96-L111
[ "def", "split", "(", "self", ",", "verbose", "=", "None", ",", "end_in_new_line", "=", "None", ")", ":", "elapsed_time", "=", "self", ".", "get_elapsed_time", "(", ")", "self", ".", "split_elapsed_time", ".", "append", "(", "elapsed_time", ")", "self", ".", "_cumulative_elapsed_time", "+=", "elapsed_time", "self", ".", "_elapsed_time", "=", "datetime", ".", "timedelta", "(", ")", "if", "verbose", "is", "None", ":", "verbose", "=", "self", ".", "verbose_end", "if", "verbose", ":", "if", "end_in_new_line", "is", "None", ":", "end_in_new_line", "=", "self", ".", "end_in_new_line", "if", "end_in_new_line", ":", "self", ".", "log", "(", "\"{} done in {}\"", ".", "format", "(", "self", ".", "description", ",", "elapsed_time", ")", ")", "else", ":", "self", ".", "log", "(", "\" done in {}\"", ".", "format", "(", "elapsed_time", ")", ")", "self", ".", "_start_time", "=", "datetime", ".", "datetime", ".", "now", "(", ")" ]
Converts an existing server into a template .
def ConvertToTemplate ( server , template , password = None , alias = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if password is None : password = clc . v1 . Server . GetCredentials ( [ server , ] , alias ) [ 0 ] [ 'Password' ] r = clc . v1 . API . Call ( 'post' , 'Server/ConvertServerToTemplate' , { 'AccountAlias' : alias , 'Name' : server , 'Password' : password , 'TemplateAlias' : template } ) return ( r )
6,671
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/server.py#L146-L161
[ "def", "linear_rref", "(", "A", ",", "b", ",", "Matrix", "=", "None", ",", "S", "=", "None", ")", ":", "if", "Matrix", "is", "None", ":", "from", "sympy", "import", "Matrix", "if", "S", "is", "None", ":", "from", "sympy", "import", "S", "mat_rows", "=", "[", "_map2l", "(", "S", ",", "list", "(", "row", ")", "+", "[", "v", "]", ")", "for", "row", ",", "v", "in", "zip", "(", "A", ",", "b", ")", "]", "aug", "=", "Matrix", "(", "mat_rows", ")", "raug", ",", "pivot", "=", "aug", ".", "rref", "(", ")", "nindep", "=", "len", "(", "pivot", ")", "return", "raug", "[", ":", "nindep", ",", ":", "-", "1", "]", ",", "raug", "[", ":", "nindep", ",", "-", "1", "]" ]
Restores an archived server .
def RestoreServer ( server , group , alias , location ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) if re . match ( "([a-zA-Z0-9]){32}" , group . replace ( "-" , "" ) ) : groups_uuid = group else : groups_uuid = clc . v1 . Group . GetGroupUUID ( group , alias , location ) r = clc . v1 . API . Call ( 'post' , 'Server/RestoreServer' , { 'AccountAlias' : alias , 'Name' : server , 'HardwareGroupUUID' : groups_uuid } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r )
6,672
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/server.py#L165-L182
[ "def", "import_key_pair", "(", "name", ",", "key", ",", "profile", ",", "key_type", "=", "None", ",", "*", "*", "libcloud_kwargs", ")", ":", "conn", "=", "_get_driver", "(", "profile", "=", "profile", ")", "libcloud_kwargs", "=", "salt", ".", "utils", ".", "args", ".", "clean_kwargs", "(", "*", "*", "libcloud_kwargs", ")", "if", "os", ".", "path", ".", "exists", "(", "key", ")", "or", "key_type", "==", "'FILE'", ":", "return", "_simple_key_pair", "(", "conn", ".", "import_key_pair_from_file", "(", "name", ",", "key", ",", "*", "*", "libcloud_kwargs", ")", ")", "else", ":", "return", "_simple_key_pair", "(", "conn", ".", "import_key_pair_from_string", "(", "name", ",", "key", ",", "*", "*", "libcloud_kwargs", ")", ")" ]
Archives the specified servers .
def _ServerActions ( action , alias , servers ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) results = [ ] for server in servers : r = clc . v1 . API . Call ( 'post' , 'Server/%sServer' % ( action ) , { 'AccountAlias' : alias , 'Name' : server } ) if int ( r [ 'StatusCode' ] ) == 0 : results . append ( r ) return ( results )
6,673
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/server.py#L187-L199
[ "def", "setOverlayTextureBounds", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTextureBounds", "pOverlayTextureBounds", "=", "VRTextureBounds_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "pOverlayTextureBounds", ")", ")", "return", "result", ",", "pOverlayTextureBounds" ]
Returns list of disks configured for the server
def GetDisks ( server , alias = None , guest_names = True ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) r = clc . v1 . API . Call ( 'post' , 'Server/ListDisks' , { 'AccountAlias' : alias , 'Name' : server , 'QueryGuestDiskNames' : guest_names } ) return ( r [ 'Disks' ] )
6,674
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/server.py#L329-L341
[ "def", "send_audio", "(", "self", ",", "ws", ")", ":", "def", "run", "(", "*", "args", ")", ":", "\"\"\"Background process to stream the data\"\"\"", "if", "not", "self", ".", "audio_source", ".", "is_buffer", ":", "while", "True", ":", "chunk", "=", "self", ".", "audio_source", ".", "input", ".", "read", "(", "ONE_KB", ")", "if", "not", "chunk", ":", "break", "self", ".", "ws_client", ".", "send", "(", "chunk", ",", "websocket", ".", "ABNF", ".", "OPCODE_BINARY", ")", "time", ".", "sleep", "(", "TEN_MILLISECONDS", ")", "self", ".", "audio_source", ".", "input", ".", "close", "(", ")", "else", ":", "while", "True", ":", "try", ":", "if", "not", "self", ".", "audio_source", ".", "input", ".", "empty", "(", ")", ":", "chunk", "=", "self", ".", "audio_source", ".", "input", ".", "get", "(", ")", "self", ".", "ws_client", ".", "send", "(", "chunk", ",", "websocket", ".", "ABNF", ".", "OPCODE_BINARY", ")", "time", ".", "sleep", "(", "TEN_MILLISECONDS", ")", "if", "self", ".", "audio_source", ".", "input", ".", "empty", "(", ")", ":", "if", "self", ".", "audio_source", ".", "is_recording", ":", "time", ".", "sleep", "(", "TEN_MILLISECONDS", ")", "else", ":", "break", "except", "Exception", ":", "if", "self", ".", "audio_source", ".", "is_recording", ":", "time", ".", "sleep", "(", "TEN_MILLISECONDS", ")", "else", ":", "break", "time", ".", "sleep", "(", "TEN_MILLISECONDS", ")", "self", ".", "ws_client", ".", "send", "(", "self", ".", "build_closing_message", "(", ")", ",", "websocket", ".", "ABNF", ".", "OPCODE_TEXT", ")", "thread", ".", "start_new_thread", "(", "run", ",", "(", ")", ")" ]
Deletes the specified disk .
def DeleteDisk ( server , scsi_bus_id , scsi_device_id , alias = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) r = clc . v1 . API . Call ( 'post' , 'Server/DeleteDisk' , { 'AccountAlias' : alias , 'Name' : server , 'OverrideFailsafes' : True , 'ScsiBusID' : scsi_bus_id , 'ScsiDeviceID' : scsi_device_id } ) return ( r )
6,675
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/server.py#L345-L360
[ "def", "fetch", "(", "self", ",", "endpoint", ",", "data", "=", "None", ")", ":", "payload", "=", "{", "\"lastServerChangeId\"", ":", "\"-1\"", ",", "\"csrf\"", ":", "self", ".", "__csrf", ",", "\"apiClient\"", ":", "\"WEB\"", "}", "if", "data", "is", "not", "None", ":", "payload", ".", "update", "(", "data", ")", "return", "self", ".", "post", "(", "endpoint", ",", "payload", ")" ]
Iterator that returns all the external reference objects of the external references object
def get_external_references ( self ) : for ext_ref_node in self . node . findall ( 'externalRef' ) : ext_refs_obj = CexternalReference ( ext_ref_node ) for ref in ext_refs_obj : yield ref
6,676
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/external_references_data.py#L184-L193
[ "async", "def", "commission", "(", "self", ",", "*", ",", "enable_ssh", ":", "bool", "=", "None", ",", "skip_networking", ":", "bool", "=", "None", ",", "skip_storage", ":", "bool", "=", "None", ",", "commissioning_scripts", ":", "typing", ".", "Sequence", "[", "str", "]", "=", "None", ",", "testing_scripts", ":", "typing", ".", "Sequence", "[", "str", "]", "=", "None", ",", "wait", ":", "bool", "=", "False", ",", "wait_interval", ":", "int", "=", "5", ")", ":", "params", "=", "{", "\"system_id\"", ":", "self", ".", "system_id", "}", "if", "enable_ssh", "is", "not", "None", ":", "params", "[", "\"enable_ssh\"", "]", "=", "enable_ssh", "if", "skip_networking", "is", "not", "None", ":", "params", "[", "\"skip_networking\"", "]", "=", "skip_networking", "if", "skip_storage", "is", "not", "None", ":", "params", "[", "\"skip_storage\"", "]", "=", "skip_storage", "if", "(", "commissioning_scripts", "is", "not", "None", "and", "len", "(", "commissioning_scripts", ")", ">", "0", ")", ":", "params", "[", "\"commissioning_scripts\"", "]", "=", "\",\"", ".", "join", "(", "commissioning_scripts", ")", "if", "testing_scripts", "is", "not", "None", ":", "if", "len", "(", "testing_scripts", ")", "==", "0", "or", "testing_scripts", "==", "\"none\"", ":", "params", "[", "\"testing_scripts\"", "]", "=", "[", "\"none\"", "]", "else", ":", "params", "[", "\"testing_scripts\"", "]", "=", "\",\"", ".", "join", "(", "testing_scripts", ")", "self", ".", "_data", "=", "await", "self", ".", "_handler", ".", "commission", "(", "*", "*", "params", ")", "if", "not", "wait", ":", "return", "self", "else", ":", "# Wait for the machine to be fully commissioned.", "while", "self", ".", "status", "in", "[", "NodeStatus", ".", "COMMISSIONING", ",", "NodeStatus", ".", "TESTING", "]", ":", "await", "asyncio", ".", "sleep", "(", "wait_interval", ")", "self", ".", "_data", "=", "await", "self", ".", "_handler", ".", "read", "(", "system_id", "=", "self", ".", "system_id", ")", "if", "self", ".", 
"status", "==", "NodeStatus", ".", "FAILED_COMMISSIONING", ":", "msg", "=", "\"{hostname} failed to commission.\"", ".", "format", "(", "hostname", "=", "self", ".", "hostname", ")", "raise", "FailedCommissioning", "(", "msg", ",", "self", ")", "if", "self", ".", "status", "==", "NodeStatus", ".", "FAILED_TESTING", ":", "msg", "=", "\"{hostname} failed testing.\"", ".", "format", "(", "hostname", "=", "self", ".", "hostname", ")", "raise", "FailedTesting", "(", "msg", ",", "self", ")", "return", "self" ]
Set the identifier for the token
def set_id ( self , this_id ) : if self . type == 'NAF' : return self . node . set ( 'id' , this_id ) elif self . type == 'KAF' : return self . node . set ( 'wid' , this_id )
6,677
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/text_data.py#L34-L43
[ "def", "assert_shape_match", "(", "shape1", ",", "shape2", ")", ":", "shape1", "=", "tf", ".", "TensorShape", "(", "shape1", ")", "shape2", "=", "tf", ".", "TensorShape", "(", "shape2", ")", "if", "shape1", ".", "ndims", "is", "None", "or", "shape2", ".", "ndims", "is", "None", ":", "raise", "ValueError", "(", "'Shapes must have known rank. Got %s and %s.'", "%", "(", "shape1", ".", "ndims", ",", "shape2", ".", "ndims", ")", ")", "shape1", ".", "assert_same_rank", "(", "shape2", ")", "shape1", ".", "assert_is_compatible_with", "(", "shape2", ")" ]
Converts the object to NAF
def to_naf ( self ) : if self . type == 'KAF' : self . type = 'NAF' for node in self . __get_wf_nodes ( ) : node . set ( 'id' , node . get ( 'wid' ) ) del node . attrib [ 'wid' ]
6,678
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/text_data.py#L212-L220
[ "def", "ls", "(", "url", "=", "'http://localhost:8080/manager'", ",", "timeout", "=", "180", ")", ":", "ret", "=", "{", "}", "data", "=", "_wget", "(", "'list'", ",", "''", ",", "url", ",", "timeout", "=", "timeout", ")", "if", "data", "[", "'res'", "]", "is", "False", ":", "return", "{", "}", "data", "[", "'msg'", "]", ".", "pop", "(", "0", ")", "for", "line", "in", "data", "[", "'msg'", "]", ":", "tmp", "=", "line", ".", "split", "(", "':'", ")", "ret", "[", "tmp", "[", "0", "]", "]", "=", "{", "'mode'", ":", "tmp", "[", "1", "]", ",", "'sessions'", ":", "tmp", "[", "2", "]", ",", "'fullname'", ":", "tmp", "[", "3", "]", ",", "'version'", ":", "''", ",", "}", "sliced", "=", "tmp", "[", "3", "]", ".", "split", "(", "'##'", ")", "if", "len", "(", "sliced", ")", ">", "1", ":", "ret", "[", "tmp", "[", "0", "]", "]", "[", "'version'", "]", "=", "sliced", "[", "1", "]", "return", "ret" ]
Returns the token object for the given token identifier
def get_wf ( self , token_id ) : wf_node = self . idx . get ( token_id ) if wf_node is not None : return Cwf ( node = wf_node , type = self . type ) else : for wf_node in self . __get_wf_nodes ( ) : if self . type == 'NAF' : label_id = 'id' elif self . type == 'KAF' : label_id = 'wid' if wf_node . get ( label_id ) == token_id : return Cwf ( node = wf_node , type = self . type ) return None
6,679
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/text_data.py#L235-L252
[ "def", "_apply_mask", "(", "self", ")", ":", "w", "=", "self", ".", "_w", "w_shape", "=", "w", ".", "get_shape", "(", ")", "mask_shape", "=", "self", ".", "_mask", ".", "get_shape", "(", ")", "if", "mask_shape", ".", "ndims", ">", "w_shape", ".", "ndims", ":", "raise", "base", ".", "IncompatibleShapeError", "(", "\"Invalid mask shape: {}. Max shape: {}\"", ".", "format", "(", "mask_shape", ".", "ndims", ",", "len", "(", "self", ".", "_data_format", ")", ")", ")", "if", "mask_shape", "!=", "w_shape", "[", ":", "mask_shape", ".", "ndims", "]", ":", "raise", "base", ".", "IncompatibleShapeError", "(", "\"Invalid mask shape: {}. Weight shape: {}\"", ".", "format", "(", "mask_shape", ",", "w_shape", ")", ")", "# TF broadcasting is a bit fragile.", "# Expand the shape of self._mask by one dim at a time to the right", "# until the rank matches `weight_shape`.", "while", "self", ".", "_mask", ".", "get_shape", "(", ")", ".", "ndims", "<", "w_shape", ".", "ndims", ":", "self", ".", "_mask", "=", "tf", ".", "expand_dims", "(", "self", ".", "_mask", ",", "-", "1", ")", "# tf.Variable & tf.ResourceVariable don't support *=.", "w", "=", "w", "*", "self", ".", "_mask", "# pylint: disable=g-no-augmented-assignment", "return", "w" ]
Adds a token object to the text layer
def add_wf ( self , wf_obj ) : if wf_obj . get_id ( ) in self . idx : raise ValueError ( "Text node (wf) with id {} already exists!" . format ( wf_obj . get_id ( ) ) ) self . node . append ( wf_obj . get_node ( ) ) self . idx [ wf_obj . get_id ( ) ] = wf_obj
6,680
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/text_data.py#L254-L264
[ "def", "check_symmetry", "(", "A", ")", ":", "A", "=", "asanyarray", "(", "A", ")", "if", "A", ".", "ndim", "!=", "2", ":", "raise", "ValueError", "(", "\"Checks symmetry only for bi-dimensional arrays.\"", ")", "if", "A", ".", "shape", "[", "0", "]", "!=", "A", ".", "shape", "[", "1", "]", ":", "return", "False", "return", "abs", "(", "A", "-", "A", ".", "T", ")", ".", "max", "(", ")", "<", "sqrt", "(", "finfo", "(", "float", ")", ".", "eps", ")" ]
Removes the tokens of the given sentence
def remove_tokens_of_sentence ( self , sentence_id ) : nodes_to_remove = set ( ) for wf in self : if wf . get_sent ( ) == sentence_id : nodes_to_remove . add ( wf . get_node ( ) ) for node in nodes_to_remove : self . node . remove ( node )
6,681
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/text_data.py#L267-L279
[ "def", "_include_public_methods", "(", "self", ",", "connection", ")", ":", "for", "attr", "in", "dir", "(", "connection", ")", ":", "value", "=", "getattr", "(", "connection", ",", "attr", ")", "if", "attr", ".", "startswith", "(", "'_'", ")", "or", "not", "callable", "(", "value", ")", ":", "continue", "self", ".", "__dict__", "[", "attr", "]", "=", "self", ".", "_wrap_public_method", "(", "attr", ")" ]
Aggregate images by day without using joins
def aggregate_daily ( image_coll , start_date = None , end_date = None , agg_type = 'mean' ) : if start_date and end_date : test_coll = image_coll . filterDate ( ee . Date ( start_date ) , ee . Date ( end_date ) ) elif start_date : test_coll = image_coll . filter ( ee . Filter . greaterThanOrEquals ( 'system:time_start' , ee . Date ( start_date ) . millis ( ) ) ) elif end_date : test_coll = image_coll . filter ( ee . Filter . lessThan ( 'system:time_start' , ee . Date ( end_date ) . millis ( ) ) ) else : test_coll = image_coll # Build a list of dates in the image_coll def get_date ( time ) : return ee . Date ( ee . Number ( time ) ) . format ( 'yyyy-MM-dd' ) date_list = ee . List ( test_coll . aggregate_array ( 'system:time_start' ) ) . map ( get_date ) . distinct ( ) . sort ( ) def aggregate_func ( date_str ) : start_date = ee . Date ( ee . String ( date_str ) ) end_date = start_date . advance ( 1 , 'day' ) agg_coll = image_coll . filterDate ( start_date , end_date ) # if agg_type.lower() == 'mean': agg_img = agg_coll . mean ( ) # elif agg_type.lower() == 'median': # agg_img = agg_coll.median() return agg_img . set ( { 'system:index' : start_date . format ( 'yyyyMMdd' ) , 'system:time_start' : start_date . millis ( ) , 'date' : start_date . format ( 'yyyy-MM-dd' ) , } ) return ee . ImageCollection ( date_list . map ( aggregate_func ) )
6,682
https://github.com/Open-ET/openet-core-beta/blob/f2b81ccf87bf7e7fe1b9f3dd1d4081d0ec7852db/openet/core/interp.py#L161-L227
[ "def", "is_rate_matrix", "(", "K", ",", "tol", "=", "1e-12", ")", ":", "K", "=", "_types", ".", "ensure_ndarray_or_sparse", "(", "K", ",", "ndim", "=", "2", ",", "uniform", "=", "True", ",", "kind", "=", "'numeric'", ")", "if", "_issparse", "(", "K", ")", ":", "return", "sparse", ".", "assessment", ".", "is_rate_matrix", "(", "K", ",", "tol", ")", "else", ":", "return", "dense", ".", "assessment", ".", "is_rate_matrix", "(", "K", ",", "tol", ")" ]
Removes the clink for the given clink identifier
def remove_this_clink ( self , clink_id ) : for clink in self . get_clinks ( ) : if clink . get_id ( ) == clink_id : self . node . remove ( clink . get_node ( ) ) break
6,683
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/causal_data.py#L156-L165
[ "def", "set_json", "(", "self", ",", "reason", "=", "''", ",", "new_page", "=", "False", ")", ":", "compressed_json", "=", "json", ".", "dumps", "(", "self", ".", "_compress_json", "(", "self", ".", "cached_json", ")", ")", "if", "len", "(", "compressed_json", ")", ">", "self", ".", "max_page_size", ":", "raise", "OverflowError", "(", "'Usernotes page is too large (>{0} characters)'", ".", "format", "(", "self", ".", "max_page_size", ")", ")", "if", "new_page", ":", "self", ".", "subreddit", ".", "wiki", ".", "create", "(", "self", ".", "page_name", ",", "compressed_json", ",", "reason", ")", "# Set the page as hidden and available to moderators only", "self", ".", "subreddit", ".", "wiki", "[", "self", ".", "page_name", "]", ".", "mod", ".", "update", "(", "False", ",", "permlevel", "=", "2", ")", "else", ":", "self", ".", "subreddit", ".", "wiki", "[", "self", ".", "page_name", "]", ".", "edit", "(", "compressed_json", ",", "reason", ")" ]
Gets estimated costs for a group of servers .
def GetGroupEstimate ( group , alias = None , location = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) group_uuid = clc . v1 . Group . GetGroupUUID ( group , alias , location ) r = clc . v1 . API . Call ( 'post' , 'Billing/GetGroupEstimate' , { 'AccountAlias' : alias , 'HardwareGroupUUID' : group_uuid } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r )
6,684
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/billing.py#L14-L29
[ "def", "_stream_annotation", "(", "file_name", ",", "pb_dir", ")", ":", "# Full url of annotation file", "url", "=", "posixpath", ".", "join", "(", "config", ".", "db_index_url", ",", "pb_dir", ",", "file_name", ")", "# Get the content", "response", "=", "requests", ".", "get", "(", "url", ")", "# Raise HTTPError if invalid url", "response", ".", "raise_for_status", "(", ")", "# Convert to numpy array", "ann_data", "=", "np", ".", "fromstring", "(", "response", ".", "content", ",", "dtype", "=", "np", ".", "dtype", "(", "'<u1'", ")", ")", "return", "ann_data" ]
Gets the charges for groups and servers within a given account and for any date range .
def GetGroupSummaries ( alias = None , date_start = None , date_end = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) payload = { 'AccountAlias' : alias } if date_start is not None : payload [ 'StartDate' ] = date_start if date_end is not None : payload [ 'EndDate' ] = date_end r = clc . v1 . API . Call ( 'post' , 'Billing/GetGroupSummaries' , payload ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'GroupTotals' ] )
6,685
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/billing.py#L34-L50
[ "def", "save_to_file", "(", "self", ",", "filename", ",", "remap_dim0", "=", "None", ",", "remap_dim1", "=", "None", ")", ":", "# rows - first index", "# columns - second index", "with", "open", "(", "filename", ",", "'w'", ")", "as", "fobj", ":", "columns", "=", "list", "(", "sorted", "(", "self", ".", "_dim1", ")", ")", "for", "col", "in", "columns", ":", "fobj", ".", "write", "(", "','", ")", "fobj", ".", "write", "(", "str", "(", "remap_dim1", "[", "col", "]", "if", "remap_dim1", "else", "col", ")", ")", "fobj", ".", "write", "(", "'\\n'", ")", "for", "row", "in", "sorted", "(", "self", ".", "_dim0", ")", ":", "fobj", ".", "write", "(", "str", "(", "remap_dim0", "[", "row", "]", "if", "remap_dim0", "else", "row", ")", ")", "for", "col", "in", "columns", ":", "fobj", ".", "write", "(", "','", ")", "fobj", ".", "write", "(", "str", "(", "self", "[", "row", ",", "col", "]", ")", ")", "fobj", ".", "write", "(", "'\\n'", ")" ]
Gets the estimated monthly cost for a given server .
def GetServerEstimate ( server , alias = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) r = clc . v1 . API . Call ( 'post' , 'Billing/GetServerEstimate' , { 'AccountAlias' : alias , 'ServerName' : server } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r )
6,686
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/billing.py#L54-L65
[ "def", "swd_sync", "(", "self", ",", "pad", "=", "False", ")", ":", "if", "pad", ":", "self", ".", "_dll", ".", "JLINK_SWD_SyncBytes", "(", ")", "else", ":", "self", ".", "_dll", ".", "JLINK_SWD_SyncBits", "(", ")", "return", "None" ]
Displays the output of get_direct_config formatted nicely
def display_direct ( ) : include_dirs , runtime_dirs , runtime = get_direct_config ( ) print ( "Include Search Dirs: {}" . format ( include_dirs ) ) print ( "\tContents: {}\n" . format ( get_dir_contents ( include_dirs ) ) ) print ( "Runtime Search Dirs: {}" . format ( runtime_dirs ) ) print ( "\tContents: {}\n" . format ( get_dir_contents ( runtime_dirs ) ) ) print ( "Runtime Libs: '{}'" . format ( runtime ) )
6,687
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/direct.py#L20-L30
[ "def", "mutate", "(", "self", ",", "mutation", "=", "None", ",", "set_obj", "=", "None", ",", "del_obj", "=", "None", ",", "set_nquads", "=", "None", ",", "del_nquads", "=", "None", ",", "commit_now", "=", "None", ",", "ignore_index_conflict", "=", "None", ",", "timeout", "=", "None", ",", "metadata", "=", "None", ",", "credentials", "=", "None", ")", ":", "mutation", "=", "self", ".", "_common_mutate", "(", "mutation", "=", "mutation", ",", "set_obj", "=", "set_obj", ",", "del_obj", "=", "del_obj", ",", "set_nquads", "=", "set_nquads", ",", "del_nquads", "=", "del_nquads", ",", "commit_now", "=", "commit_now", ",", "ignore_index_conflict", "=", "ignore_index_conflict", ")", "new_metadata", "=", "self", ".", "_dg", ".", "add_login_metadata", "(", "metadata", ")", "mutate_error", "=", "None", "try", ":", "assigned", "=", "self", ".", "_dc", ".", "mutate", "(", "mutation", ",", "timeout", "=", "timeout", ",", "metadata", "=", "new_metadata", ",", "credentials", "=", "credentials", ")", "except", "Exception", "as", "error", ":", "if", "util", ".", "is_jwt_expired", "(", "error", ")", ":", "self", ".", "_dg", ".", "retry_login", "(", ")", "new_metadata", "=", "self", ".", "_dg", ".", "add_login_metadata", "(", "metadata", ")", "try", ":", "assigned", "=", "self", ".", "_dc", ".", "mutate", "(", "mutation", ",", "timeout", "=", "timeout", ",", "metadata", "=", "new_metadata", ",", "credentials", "=", "credentials", ")", "except", "Exception", "as", "error", ":", "mutate_error", "=", "error", "else", ":", "mutate_error", "=", "error", "if", "mutate_error", "is", "not", "None", ":", "try", ":", "self", ".", "discard", "(", "timeout", "=", "timeout", ",", "metadata", "=", "metadata", ",", "credentials", "=", "credentials", ")", "except", ":", "# Ignore error - user should see the original error.", "pass", "self", ".", "_common_except_mutate", "(", "mutate_error", ")", "if", "mutation", ".", "commit_now", ":", "self", ".", "_finished", "=", "True", "self", ".", 
"merge_context", "(", "assigned", ".", "context", ")", "return", "assigned" ]
Save model to pickle file
def save ( self , filename = 'saved.ol.p' ) : import dill as pickle sv = { # 'feature_function': self.feature_function, 'cl' : self . cl } pickle . dump ( sv , open ( filename , "wb" ) )
6,688
https://github.com/mjirik/imtools/blob/eb29fa59df0e0684d8334eb3bc5ef36ea46d1d3a/imtools/trainer3d.py#L36-L46
[ "def", "Nu_vertical_cylinder", "(", "Pr", ",", "Gr", ",", "L", "=", "None", ",", "D", "=", "None", ",", "Method", "=", "None", ",", "AvailableMethods", "=", "False", ")", ":", "def", "list_methods", "(", ")", ":", "methods", "=", "[", "]", "for", "key", ",", "values", "in", "vertical_cylinder_correlations", ".", "items", "(", ")", ":", "if", "values", "[", "4", "]", "or", "all", "(", "(", "L", ",", "D", ")", ")", ":", "methods", ".", "append", "(", "key", ")", "if", "'Popiel & Churchill'", "in", "methods", ":", "methods", ".", "remove", "(", "'Popiel & Churchill'", ")", "methods", ".", "insert", "(", "0", ",", "'Popiel & Churchill'", ")", "elif", "'McAdams, Weiss & Saunders'", "in", "methods", ":", "methods", ".", "remove", "(", "'McAdams, Weiss & Saunders'", ")", "methods", ".", "insert", "(", "0", ",", "'McAdams, Weiss & Saunders'", ")", "return", "methods", "if", "AvailableMethods", ":", "return", "list_methods", "(", ")", "if", "not", "Method", ":", "Method", "=", "list_methods", "(", ")", "[", "0", "]", "if", "Method", "in", "vertical_cylinder_correlations", ":", "if", "vertical_cylinder_correlations", "[", "Method", "]", "[", "4", "]", ":", "return", "vertical_cylinder_correlations", "[", "Method", "]", "[", "0", "]", "(", "Pr", "=", "Pr", ",", "Gr", "=", "Gr", ")", "else", ":", "return", "vertical_cylinder_correlations", "[", "Method", "]", "[", "0", "]", "(", "Pr", "=", "Pr", ",", "Gr", "=", "Gr", ",", "L", "=", "L", ",", "D", "=", "D", ")", "else", ":", "raise", "Exception", "(", "\"Correlation name not recognized; see the \"", "\"documentation for the available options.\"", ")" ]
Given a group name return the unique group ID .
def GetGroupUUID ( group , alias = None , location = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) r = Group . GetGroups ( location , alias ) for row in r : if row [ 'Name' ] == group : return ( row [ 'UUID' ] ) else : if clc . args : clc . v1 . output . Status ( "ERROR" , 3 , "Group %s not found in account %s datacenter %s" % ( group , alias , location ) ) raise Exception ( "Group not found" )
6,689
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/group.py#L14-L28
[ "def", "wet_bulb_temperature", "(", "pressure", ",", "temperature", ",", "dewpoint", ")", ":", "if", "not", "hasattr", "(", "pressure", ",", "'shape'", ")", ":", "pressure", "=", "atleast_1d", "(", "pressure", ")", "temperature", "=", "atleast_1d", "(", "temperature", ")", "dewpoint", "=", "atleast_1d", "(", "dewpoint", ")", "it", "=", "np", ".", "nditer", "(", "[", "pressure", ",", "temperature", ",", "dewpoint", ",", "None", "]", ",", "op_dtypes", "=", "[", "'float'", ",", "'float'", ",", "'float'", ",", "'float'", "]", ",", "flags", "=", "[", "'buffered'", "]", ")", "for", "press", ",", "temp", ",", "dewp", ",", "ret", "in", "it", ":", "press", "=", "press", "*", "pressure", ".", "units", "temp", "=", "temp", "*", "temperature", ".", "units", "dewp", "=", "dewp", "*", "dewpoint", ".", "units", "lcl_pressure", ",", "lcl_temperature", "=", "lcl", "(", "press", ",", "temp", ",", "dewp", ")", "moist_adiabat_temperatures", "=", "moist_lapse", "(", "concatenate", "(", "[", "lcl_pressure", ",", "press", "]", ")", ",", "lcl_temperature", ")", "ret", "[", "...", "]", "=", "moist_adiabat_temperatures", "[", "-", "1", "]", "# If we started with a scalar, return a scalar", "if", "it", ".", "operands", "[", "3", "]", ".", "size", "==", "1", ":", "return", "it", ".", "operands", "[", "3", "]", "[", "0", "]", "*", "moist_adiabat_temperatures", ".", "units", "return", "it", ".", "operands", "[", "3", "]", "*", "moist_adiabat_temperatures", ".", "units" ]
Get group name associated with ID .
def NameGroups ( data_arr , id_key ) : new_data_arr = [ ] for data in data_arr : try : data_arr [ id_key ] = clc . _GROUP_MAPPING [ data [ id_key ] ] except : pass new_data_arr . append ( data ) if clc . args : clc . v1 . output . Status ( "ERROR" , 2 , "Group name conversion not yet implemented" ) return ( new_data_arr )
6,690
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/group.py#L33-L46
[ "def", "start", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "is_running", "(", ")", ":", "self", ".", "websock_url", "=", "self", ".", "chrome", ".", "start", "(", "*", "*", "kwargs", ")", "self", ".", "websock", "=", "websocket", ".", "WebSocketApp", "(", "self", ".", "websock_url", ")", "self", ".", "websock_thread", "=", "WebsockReceiverThread", "(", "self", ".", "websock", ",", "name", "=", "'WebsockThread:%s'", "%", "self", ".", "chrome", ".", "port", ")", "self", ".", "websock_thread", ".", "start", "(", ")", "self", ".", "_wait_for", "(", "lambda", ":", "self", ".", "websock_thread", ".", "is_open", ",", "timeout", "=", "30", ")", "# tell browser to send us messages we're interested in", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Page.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Console.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Runtime.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.setForceUpdateOnPageLoad'", ")", "# disable google analytics", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.setBlockedURLs'", ",", "params", "=", "{", "'urls'", ":", "[", "'*google-analytics.com/analytics.js'", ",", "'*google-analytics.com/ga.js'", "]", "}", ")" ]
Return all of alias groups in the given location .
def GetGroups ( location = None , alias = None ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) r = clc . v1 . API . Call ( 'post' , 'Group/GetGroups' , { 'AccountAlias' : alias , 'Location' : location } ) for group in r [ 'HardwareGroups' ] : clc . _GROUP_MAPPING [ group [ 'UUID' ] ] = group [ 'Name' ] if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'HardwareGroups' ] )
6,691
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/group.py#L50-L62
[ "def", "trace_integration", "(", "tracer", "=", "None", ")", ":", "log", ".", "info", "(", "'Integrated module: {}'", ".", "format", "(", "MODULE_NAME", ")", ")", "# Wrap the httplib request function", "request_func", "=", "getattr", "(", "httplib", ".", "HTTPConnection", ",", "HTTPLIB_REQUEST_FUNC", ")", "wrapped_request", "=", "wrap_httplib_request", "(", "request_func", ")", "setattr", "(", "httplib", ".", "HTTPConnection", ",", "request_func", ".", "__name__", ",", "wrapped_request", ")", "# Wrap the httplib response function", "response_func", "=", "getattr", "(", "httplib", ".", "HTTPConnection", ",", "HTTPLIB_RESPONSE_FUNC", ")", "wrapped_response", "=", "wrap_httplib_response", "(", "response_func", ")", "setattr", "(", "httplib", ".", "HTTPConnection", ",", "response_func", ".", "__name__", ",", "wrapped_response", ")" ]
Applies group level actions .
def _GroupActions ( action , group , alias , location ) : if alias is None : alias = clc . v1 . Account . GetAlias ( ) if location is None : location = clc . v1 . Account . GetLocation ( ) groups_uuid = Group . GetGroupUUID ( group , alias , location ) r = clc . v1 . API . Call ( 'post' , 'Group/%sHardwareGroup' % ( action ) , { 'UUID' : groups_uuid , 'AccountAlias' : alias } ) return ( r )
6,692
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/group.py#L89-L102
[ "def", "add_json_mask", "(", "self", ",", "start", ",", "method_str", ",", "json_producer", ")", ":", "def", "send_json", "(", "drh", ",", "rem_path", ")", ":", "obj", "=", "json_producer", "(", "drh", ",", "rem_path", ")", "if", "not", "isinstance", "(", "obj", ",", "Response", ")", ":", "obj", "=", "Response", "(", "obj", ")", "ctype", "=", "obj", ".", "get_ctype", "(", "\"application/json\"", ")", "code", "=", "obj", ".", "code", "obj", "=", "obj", ".", "response", "if", "obj", "is", "None", ":", "drh", ".", "send_error", "(", "404", ",", "\"File not found\"", ")", "return", "None", "f", "=", "BytesIO", "(", ")", "json_str", "=", "json_dumps", "(", "obj", ")", "if", "isinstance", "(", "json_str", ",", "(", "str", ",", "unicode", ")", ")", ":", "try", ":", "json_str", "=", "json_str", ".", "decode", "(", "'utf8'", ")", "except", "AttributeError", ":", "pass", "json_str", "=", "json_str", ".", "encode", "(", "'utf8'", ")", "f", ".", "write", "(", "json_str", ")", "f", ".", "flush", "(", ")", "size", "=", "f", ".", "tell", "(", ")", "f", ".", "seek", "(", "0", ")", "# handle ETag caching", "if", "drh", ".", "request_version", ">=", "\"HTTP/1.1\"", ":", "e_tag", "=", "\"{0:x}\"", ".", "format", "(", "zlib", ".", "crc32", "(", "f", ".", "read", "(", ")", ")", "&", "0xFFFFFFFF", ")", "f", ".", "seek", "(", "0", ")", "match", "=", "_getheader", "(", "drh", ".", "headers", ",", "'if-none-match'", ")", "if", "match", "is", "not", "None", ":", "if", "drh", ".", "check_cache", "(", "e_tag", ",", "match", ")", ":", "f", ".", "close", "(", ")", "return", "None", "drh", ".", "send_header", "(", "\"ETag\"", ",", "e_tag", ",", "end_header", "=", "True", ")", "drh", ".", "send_header", "(", "\"Cache-Control\"", ",", "\"max-age={0}\"", ".", "format", "(", "self", ".", "max_age", ")", ",", "end_header", "=", "True", ")", "drh", ".", "send_response", "(", "code", ")", "drh", ".", "send_header", "(", "\"Content-Type\"", ",", "ctype", ")", "drh", ".", "send_header", "(", 
"\"Content-Length\"", ",", "size", ")", "drh", ".", "end_headers", "(", ")", "return", "f", "self", ".", "_add_file_mask", "(", "start", ",", "method_str", ",", "send_json", ")" ]
Extracts endpoint and path from the request URL .
def get_endpoint_and_path ( environ ) : path = environ [ 'PATH_INFO' ] components = path . split ( '/' ) if '..' in components : raise HttpError ( '400 Bad Request' , 'Path cannot contain "..".' ) # Strip closing slash if components and components [ - 1 ] == '' : components . pop ( ) # If path contained '//', get the segment after the last occurence try : first = _rindex ( components , '' ) + 1 except ValueError : first = 0 components = components [ first : ] if len ( components ) == 0 : return '' , '' else : return components [ 0 ] , '/' . join ( components [ 1 : ] )
6,693
https://github.com/sio2project/filetracker/blob/359b474850622e3d0c25ee2596d7242c02f84efb/filetracker/servers/base.py#L63-L90
[ "def", "add_fluctuations", "(", "hdf5_file", ",", "N_columns", ",", "N_processes", ")", ":", "random_state", "=", "np", ".", "random", ".", "RandomState", "(", "0", ")", "slice_queue", "=", "multiprocessing", ".", "JoinableQueue", "(", ")", "pid_list", "=", "[", "]", "for", "i", "in", "range", "(", "N_processes", ")", ":", "worker", "=", "Fluctuations_worker", "(", "hdf5_file", ",", "'/aff_prop_group/similarities'", ",", "random_state", ",", "N_columns", ",", "slice_queue", ")", "worker", ".", "daemon", "=", "True", "worker", ".", "start", "(", ")", "pid_list", ".", "append", "(", "worker", ".", "pid", ")", "for", "rows_slice", "in", "chunk_generator", "(", "N_columns", ",", "4", "*", "N_processes", ")", ":", "slice_queue", ".", "put", "(", "rows_slice", ")", "slice_queue", ".", "join", "(", ")", "slice_queue", ".", "close", "(", ")", "terminate_processes", "(", "pid_list", ")", "gc", ".", "collect", "(", ")" ]
convenience function for packing
def pack ( self ) : block = bytearray ( self . size ) self . pack_into ( block ) return block
6,694
https://github.com/srossross/rpmfile/blob/3ab96f211da7b56f5e99d8cc248f714a6e542d31/rpmfile/cpiofile.py#L108-L112
[ "def", "init_db", "(", "self", ")", ":", "with", "self", ".", "conn", ".", "transaction", "(", ")", "as", "t", ":", "t", ".", "execute", "(", "'''\n CREATE TABLE temporal (\n tuid INTEGER,\n revision CHAR(12) NOT NULL,\n file TEXT,\n line INTEGER\n );'''", ")", "t", ".", "execute", "(", "'''\n CREATE TABLE annotations (\n revision CHAR(12) NOT NULL,\n file TEXT,\n annotation TEXT,\n PRIMARY KEY(revision, file)\n );'''", ")", "# Used in frontier updating", "t", ".", "execute", "(", "'''\n CREATE TABLE latestFileMod (\n file TEXT,\n revision CHAR(12) NOT NULL,\n PRIMARY KEY(file)\n );'''", ")", "t", ".", "execute", "(", "\"CREATE UNIQUE INDEX temporal_rev_file ON temporal(revision, file, line)\"", ")", "Log", ".", "note", "(", "\"Tables created successfully\"", ")" ]
predicate indicating whether a block of memory includes a magic number
def encoded_class ( block , offset = 0 ) : if not block : raise InvalidFileFormatNull for key in __magicmap__ : if block . find ( key , offset , offset + len ( key ) ) > - 1 : return __magicmap__ [ key ] raise InvalidFileFormat
6,695
https://github.com/srossross/rpmfile/blob/3ab96f211da7b56f5e99d8cc248f714a6e542d31/rpmfile/cpiofile.py#L295-L306
[ "def", "reinit_index", "(", "index", "=", "INDEX_NAME", ")", ":", "es_conn", ".", "indices", ".", "delete", "(", "index", ",", "ignore", "=", "404", ")", "try", ":", "es_conn", ".", "indices", ".", "create", "(", "index", ",", "INDEX_SETTINGS", ".", "get", "(", "index", ",", "None", ")", ")", "except", "TransportError", "as", "e", ":", "raise", "Exception", "(", "'Failed to created index, got: {}'", ".", "format", "(", "e", ".", "error", ")", ")" ]
Similar to shutil . copyfileobj but supports limiting data size .
def _copy_stream ( src , dest , length = 0 ) : if length == 0 : shutil . copyfileobj ( src , dest ) return bytes_left = length while bytes_left > 0 : buf_size = min ( _BUFFER_SIZE , bytes_left ) buf = src . read ( buf_size ) dest . write ( buf ) bytes_left -= buf_size
6,696
https://github.com/sio2project/filetracker/blob/359b474850622e3d0c25ee2596d7242c02f84efb/filetracker/servers/storage.py#L375-L400
[ "def", "_images", "(", "self", ",", "sys_output", ")", ":", "import", "re", "gap_pattern", "=", "re", ".", "compile", "(", "'\\t|\\s{2,}'", ")", "image_list", "=", "[", "]", "output_lines", "=", "sys_output", ".", "split", "(", "'\\n'", ")", "column_headers", "=", "gap_pattern", ".", "split", "(", "output_lines", "[", "0", "]", ")", "for", "i", "in", "range", "(", "1", ",", "len", "(", "output_lines", ")", ")", ":", "columns", "=", "gap_pattern", ".", "split", "(", "output_lines", "[", "i", "]", ")", "if", "len", "(", "columns", ")", "==", "len", "(", "column_headers", ")", ":", "image_details", "=", "{", "}", "for", "j", "in", "range", "(", "len", "(", "columns", ")", ")", ":", "image_details", "[", "column_headers", "[", "j", "]", "]", "=", "columns", "[", "j", "]", "image_list", ".", "append", "(", "image_details", ")", "return", "image_list" ]
Checks if the path exists - is a file a directory or a symbolic link that may be broken .
def _path_exists ( path ) : return os . path . exists ( path ) or os . path . islink ( path )
6,697
https://github.com/sio2project/filetracker/blob/359b474850622e3d0c25ee2596d7242c02f84efb/filetracker/servers/storage.py#L420-L423
[ "def", "convert", "(", "self", ",", "request", ",", "response", ",", "data", ")", ":", "result", "=", "[", "]", "for", "conv", ",", "datum", "in", "zip", "(", "self", ".", "conversions", ",", "data", ")", ":", "# Only include conversion if it's allowed", "if", "conv", ".", "modifier", ".", "accept", "(", "response", ".", "status_code", ")", ":", "result", ".", "append", "(", "conv", ".", "convert", "(", "request", ",", "response", ",", "datum", ")", ")", "else", ":", "result", ".", "append", "(", "'-'", ")", "return", "''", ".", "join", "(", "result", ")" ]
A simple wrapper for fcntl exclusive lock .
def _exclusive_lock ( path ) : _create_file_dirs ( path ) fd = os . open ( path , os . O_WRONLY | os . O_CREAT , 0o600 ) try : retries_left = _LOCK_RETRIES success = False while retries_left > 0 : # try to acquire the lock in a loop # because gevent doesn't treat flock as IO, # so waiting here without yielding would get the worker killed try : fcntl . flock ( fd , fcntl . LOCK_EX | fcntl . LOCK_NB ) success = True break except IOError as e : if e . errno in [ errno . EAGAIN , errno . EWOULDBLOCK ] : # This yields execution to other green threads. gevent . sleep ( _LOCK_SLEEP_TIME_S ) retries_left -= 1 else : raise if success : yield else : raise ConcurrentModificationError ( path ) finally : if success : fcntl . flock ( fd , fcntl . LOCK_UN ) os . close ( fd )
6,698
https://github.com/sio2project/filetracker/blob/359b474850622e3d0c25ee2596d7242c02f84efb/filetracker/servers/storage.py#L431-L463
[ "def", "text_array_to_html", "(", "text_arr", ")", ":", "if", "not", "text_arr", ".", "shape", ":", "# It is a scalar. No need to put it in a table, just apply markdown", "return", "plugin_util", ".", "markdown_to_safe_html", "(", "np", ".", "asscalar", "(", "text_arr", ")", ")", "warning", "=", "''", "if", "len", "(", "text_arr", ".", "shape", ")", ">", "2", ":", "warning", "=", "plugin_util", ".", "markdown_to_safe_html", "(", "WARNING_TEMPLATE", "%", "len", "(", "text_arr", ".", "shape", ")", ")", "text_arr", "=", "reduce_to_2d", "(", "text_arr", ")", "html_arr", "=", "[", "plugin_util", ".", "markdown_to_safe_html", "(", "x", ")", "for", "x", "in", "text_arr", ".", "reshape", "(", "-", "1", ")", "]", "html_arr", "=", "np", ".", "array", "(", "html_arr", ")", ".", "reshape", "(", "text_arr", ".", "shape", ")", "return", "warning", "+", "make_table", "(", "html_arr", ")" ]
Removes a file from the storage .
def delete ( self , name , version , _lock = True ) : link_path = self . _link_path ( name ) if _lock : file_lock = _exclusive_lock ( self . _lock_path ( 'links' , name ) ) else : file_lock = _no_lock ( ) with file_lock : logger . debug ( 'Acquired or inherited lock for link %s.' , name ) if not _path_exists ( link_path ) : raise FiletrackerFileNotFoundError if _file_version ( link_path ) > version : logger . info ( 'Tried to delete newer version of %s (%d < %d), ignoring.' , name , version , _file_version ( link_path ) ) return False digest = self . _digest_for_link ( name ) with _exclusive_lock ( self . _lock_path ( 'blobs' , digest ) ) : logger . debug ( 'Acquired lock for blob %s.' , digest ) should_delete_blob = False with self . _db_transaction ( ) as txn : logger . debug ( 'Started DB transaction (deleting link).' ) digest_bytes = digest . encode ( ) link_count = self . db . get ( digest_bytes , txn = txn ) if link_count is None : raise RuntimeError ( "File exists but has no key in db" ) link_count = int ( link_count ) if link_count == 1 : logger . debug ( 'Deleting last link to blob %s.' , digest ) self . db . delete ( digest_bytes , txn = txn ) self . db . delete ( '{}:logical_size' . format ( digest ) . encode ( ) , txn = txn ) should_delete_blob = True else : new_count = str ( link_count - 1 ) . encode ( ) self . db . put ( digest_bytes , new_count , txn = txn ) logger . debug ( 'Committing DB transaction (deleting link).' ) logger . debug ( 'Committed DB transaction (deleting link).' ) os . unlink ( link_path ) logger . debug ( 'Deleted link %s.' , name ) if should_delete_blob : os . unlink ( self . _blob_path ( digest ) ) logger . debug ( 'Released lock for blob %s.' , digest ) logger . debug ( 'Released (or gave back) lock for link %s.' , name ) return True
6,699
https://github.com/sio2project/filetracker/blob/359b474850622e3d0c25ee2596d7242c02f84efb/filetracker/servers/storage.py#L222-L287
[ "def", "update", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "augment_args", "(", "args", ",", "kwargs", ")", "kwargs", "[", "'log_action'", "]", "=", "kwargs", ".", "get", "(", "'log_action'", ",", "'update'", ")", "if", "not", "self", ".", "rec", ":", "return", "self", ".", "add", "(", "*", "*", "kwargs", ")", "else", ":", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", ":", "# Don't update object; use whatever was set in the original record", "if", "k", "not", "in", "(", "'source'", ",", "'s_vid'", ",", "'table'", ",", "'t_vid'", ",", "'partition'", ",", "'p_vid'", ")", ":", "setattr", "(", "self", ".", "rec", ",", "k", ",", "v", ")", "self", ".", "_session", ".", "merge", "(", "self", ".", "rec", ")", "if", "self", ".", "_logger", ":", "self", ".", "_logger", ".", "info", "(", "self", ".", "rec", ".", "log_str", ")", "self", ".", "_session", ".", "commit", "(", ")", "self", ".", "_ai_rec_id", "=", "None", "return", "self", ".", "rec", ".", "id" ]