query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Accepts the current item as the current column .
def acceptColumn ( self ) : self . navigator ( ) . hide ( ) self . lineEdit ( ) . setText ( self . navigator ( ) . currentSchemaPath ( ) ) self . emitSchemaColumnChanged ( self . navigator ( ) . currentColumn ( ) )
11,200
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbcolumnnavigator.py#L270-L276
[ "def", "public_broadcaster", "(", ")", ":", "while", "__websocket_server_running__", ":", "pipein", "=", "open", "(", "PUBLIC_PIPE", ",", "'r'", ")", "line", "=", "pipein", ".", "readline", "(", ")", ".", "replace", "(", "'\\n'", ",", "''", ")", ".", "replace", "(", "'\\r'", ",", "''", ")", "if", "line", "!=", "''", ":", "WebSocketHandler", ".", "broadcast", "(", "line", ")", "print", "line", "remaining_lines", "=", "pipein", ".", "read", "(", ")", "pipein", ".", "close", "(", ")", "pipeout", "=", "open", "(", "PUBLIC_PIPE", ",", "'w'", ")", "pipeout", ".", "write", "(", "remaining_lines", ")", "pipeout", ".", "close", "(", ")", "else", ":", "pipein", ".", "close", "(", ")", "time", ".", "sleep", "(", "0.05", ")" ]
Displays the popup associated with this navigator .
def showPopup ( self ) : nav = self . navigator ( ) nav . move ( self . mapToGlobal ( QPoint ( 0 , self . height ( ) ) ) ) nav . resize ( 400 , 250 ) nav . show ( )
11,201
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbcolumnnavigator.py#L428-L436
[ "def", "_read_body_by_chunk", "(", "self", ",", "response", ",", "file", ",", "raw", "=", "False", ")", ":", "reader", "=", "ChunkedTransferReader", "(", "self", ".", "_connection", ")", "file_is_async", "=", "hasattr", "(", "file", ",", "'drain'", ")", "while", "True", ":", "chunk_size", ",", "data", "=", "yield", "from", "reader", ".", "read_chunk_header", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "data", ")", "if", "raw", ":", "file", ".", "write", "(", "data", ")", "if", "not", "chunk_size", ":", "break", "while", "True", ":", "content", ",", "data", "=", "yield", "from", "reader", ".", "read_chunk_body", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "data", ")", "if", "not", "content", ":", "if", "raw", ":", "file", ".", "write", "(", "data", ")", "break", "content", "=", "self", ".", "_decompress_data", "(", "content", ")", "if", "file", ":", "file", ".", "write", "(", "content", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "content", "=", "self", ".", "_flush_decompressor", "(", ")", "if", "file", ":", "file", ".", "write", "(", "content", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "trailer_data", "=", "yield", "from", "reader", ".", "read_trailer", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "trailer_data", ")", "if", "file", "and", "raw", ":", "file", ".", "write", "(", "trailer_data", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "response", ".", "fields", ".", "parse", "(", "trailer_data", ")" ]
Finds the sha256 hash of the content .
def sha256 ( content ) : if isinstance ( content , str ) : content = content . encode ( 'utf-8' ) return hashlib . sha256 ( content ) . hexdigest ( )
11,202
https://github.com/kimdhamilton/merkle-proofs/blob/77551cc65f72b50ac203f10a5069cb1a5b3ffb49/merkleproof/hash_functions.py#L9-L13
[ "def", "_enable_notifications_failed", "(", "self", ",", "dbus_error", ")", ":", "if", "(", "(", "dbus_error", ".", "get_dbus_name", "(", ")", "==", "'org.bluez.Error.Failed'", ")", "and", "(", "(", "dbus_error", ".", "get_dbus_message", "(", ")", "==", "\"Already notifying\"", ")", "or", "(", "dbus_error", ".", "get_dbus_message", "(", ")", "==", "\"No notify session started\"", ")", ")", ")", ":", "# Ignore cases where notifications where already enabled or already disabled", "return", "error", "=", "_error_from_dbus_error", "(", "dbus_error", ")", "self", ".", "service", ".", "device", ".", "characteristic_enable_notifications_failed", "(", "characteristic", "=", "self", ",", "error", "=", "error", ")" ]
Gets the cursor by type if as_dict is ture make a dict sql connection cursor
def cursor ( self , as_dict = False ) : self . ensure_connect ( ) ctype = self . real_ctype ( as_dict ) return self . _connect . cursor ( ctype )
11,203
https://github.com/whiteclover/dbpy/blob/3d9ce85f55cfb39cced22081e525f79581b26b3a/db/connection.py#L57-L61
[ "def", "_get_snapshot", "(", "self", ")", ":", "return", "self", ".", "_scheduler_session", ".", "product_request", "(", "Snapshot", ",", "subjects", "=", "[", "PathGlobs", "(", "self", ".", "_invalidation_globs", ")", "]", ")", "[", "0", "]" ]
Gets members of current team
def members ( self ) : resp = self . _rtm_client . get ( 'v1/current_team.members?all=true' ) if resp . is_fail ( ) : raise RTMServiceError ( 'Failed to get members of current team' , resp ) return resp . data [ 'result' ]
11,204
https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/bearychat/rtm_client_service.py#L33-L48
[ "def", "start_transmit", "(", "self", ",", "blocking", "=", "False", ",", "start_packet_groups", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "start_packet_groups", ":", "port_list_for_packet_groups", "=", "self", ".", "ports", ".", "values", "(", ")", "port_list_for_packet_groups", "=", "self", ".", "set_ports_list", "(", "*", "port_list_for_packet_groups", ")", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartTransmit {}'", ".", "format", "(", "port_list", ")", ")", "time", ".", "sleep", "(", "0.2", ")", "if", "blocking", ":", "self", ".", "wait_transmit", "(", "*", "ports", ")" ]
Gets channels of current team
def channels ( self ) : resp = self . _rtm_client . get ( 'v1/current_team.channels' ) if resp . is_fail ( ) : raise RTMServiceError ( 'Failed to get channels of current team' , resp ) return resp . data [ 'result' ]
11,205
https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/bearychat/rtm_client_service.py#L50-L65
[ "def", "start_transmit", "(", "self", ",", "blocking", "=", "False", ",", "start_packet_groups", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "start_packet_groups", ":", "port_list_for_packet_groups", "=", "self", ".", "ports", ".", "values", "(", ")", "port_list_for_packet_groups", "=", "self", ".", "set_ports_list", "(", "*", "port_list_for_packet_groups", ")", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartTransmit {}'", ".", "format", "(", "port_list", ")", ")", "time", ".", "sleep", "(", "0.2", ")", "if", "blocking", ":", "self", ".", "wait_transmit", "(", "*", "ports", ")" ]
Gets user information by user id
def info ( self , user_id ) : resp = self . _rtm_client . get ( 'v1/user.info?user_id={}' . format ( user_id ) ) if resp . is_fail ( ) : raise RTMServiceError ( 'Failed to get user information' , resp ) return resp . data [ 'result' ]
11,206
https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/bearychat/rtm_client_service.py#L69-L85
[ "def", "initializerepo", "(", "self", ")", ":", "try", ":", "os", ".", "mkdir", "(", "self", ".", "repopath", ")", "except", "OSError", ":", "pass", "cmd", "=", "self", ".", "repo", ".", "init", "(", "bare", "=", "self", ".", "bare", ",", "shared", "=", "self", ".", "shared", ")", "if", "not", "self", ".", "bare", ":", "self", ".", "write_testing_data", "(", "[", "]", ",", "[", "]", ")", "self", ".", "write_training_data", "(", "[", "]", ",", "[", "]", ")", "self", ".", "write_classifier", "(", "None", ")", "cmd", "=", "self", ".", "repo", ".", "add", "(", "'training.pkl'", ")", "cmd", "=", "self", ".", "repo", ".", "add", "(", "'testing.pkl'", ")", "cmd", "=", "self", ".", "repo", ".", "add", "(", "'classifier.pkl'", ")", "cmd", "=", "self", ".", "repo", ".", "commit", "(", "m", "=", "'initial commit'", ")", "cmd", "=", "self", ".", "repo", ".", "tag", "(", "'initial'", ")", "cmd", "=", "self", ".", "set_version", "(", "'initial'", ")" ]
Gets channel information by channel id
def info ( self , channel_id ) : resource = 'v1/channel.info?channel_id={}' . format ( channel_id ) resp = self . _rtm_client . get ( resource ) if resp . is_fail ( ) : raise RTMServiceError ( "Failed to get channel information" , resp ) return resp . data [ 'result' ]
11,207
https://github.com/bearyinnovative/bearychat.py/blob/6c7af2d215c2ff7135bb5af66ca333d0ea1089fd/bearychat/rtm_client_service.py#L89-L106
[ "def", "write_workbook", "(", "filename", ",", "table_list", ",", "column_width", "=", "None", ")", ":", "# Modify default header format", "# Pandas' default header format is bold text with thin borders. Here we", "# use bold text only, without borders.", "# The header style structure is in pd.core.format in pandas<=0.18.0,", "# pd.formats.format in 0.18.1<=pandas<0.20, and pd.io.formats.excel in", "# pandas>=0.20.", "# Also, wrap in a try-except block in case style structure is not found.", "format_module_found", "=", "False", "try", ":", "# Get format module", "if", "packaging", ".", "version", ".", "parse", "(", "pd", ".", "__version__", ")", "<=", "packaging", ".", "version", ".", "parse", "(", "'0.18'", ")", ":", "format_module", "=", "pd", ".", "core", ".", "format", "elif", "packaging", ".", "version", ".", "parse", "(", "pd", ".", "__version__", ")", "<", "packaging", ".", "version", ".", "parse", "(", "'0.20'", ")", ":", "format_module", "=", "pd", ".", "formats", ".", "format", "else", ":", "import", "pandas", ".", "io", ".", "formats", ".", "excel", "as", "format_module", "# Save previous style, replace, and indicate that previous style should", "# be restored at the end", "old_header_style", "=", "format_module", ".", "header_style", "format_module", ".", "header_style", "=", "{", "\"font\"", ":", "{", "\"bold\"", ":", "True", "}", "}", "format_module_found", "=", "True", "except", "AttributeError", "as", "e", ":", "pass", "# Generate output writer object", "writer", "=", "pd", ".", "ExcelWriter", "(", "filename", ",", "engine", "=", "'xlsxwriter'", ")", "# Write tables", "for", "sheet_name", ",", "df", "in", "table_list", ":", "# Convert index names to regular columns", "df", "=", "df", ".", "reset_index", "(", ")", "# Write to an Excel sheet", "df", ".", "to_excel", "(", "writer", ",", "sheet_name", "=", "sheet_name", ",", "index", "=", "False", ")", "# Set column width", "if", "column_width", "is", "None", ":", "for", "i", ",", "(", "col_name", 
",", "column", ")", "in", "enumerate", "(", "six", ".", "iteritems", "(", "df", ")", ")", ":", "# Get the maximum number of characters in a column", "max_chars_col", "=", "column", ".", "astype", "(", "str", ")", ".", "str", ".", "len", "(", ")", ".", "max", "(", ")", "max_chars_col", "=", "max", "(", "len", "(", "col_name", ")", ",", "max_chars_col", ")", "# Write width", "writer", ".", "sheets", "[", "sheet_name", "]", ".", "set_column", "(", "i", ",", "i", ",", "width", "=", "1.", "*", "max_chars_col", ")", "else", ":", "writer", ".", "sheets", "[", "sheet_name", "]", ".", "set_column", "(", "0", ",", "len", "(", "df", ".", "columns", ")", "-", "1", ",", "width", "=", "column_width", ")", "# Write excel file", "writer", ".", "save", "(", ")", "# Restore previous header format", "if", "format_module_found", ":", "format_module", ".", "header_style", "=", "old_header_style" ]
Transform the array from Ideone into a Python dictionary .
def _transform_to_dict ( result ) : result_dict = { } property_list = result . item for item in property_list : result_dict [ item . key [ 0 ] ] = item . value [ 0 ] return result_dict
11,208
https://github.com/jschaf/ideone-api/blob/2e97767071d5be53c1d435f755b425a6dd8f2514/ideone/__init__.py#L40-L48
[ "def", "_file_watcher", "(", "state", ")", ":", "conf", "=", "state", ".", "app", ".", "config", "file_path", "=", "conf", ".", "get", "(", "'WAFFLE_WATCHER_FILE'", ",", "'/tmp/waffleconf.txt'", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "file_path", ")", ":", "# Create watch file", "open", "(", "file_path", ",", "'a'", ")", ".", "close", "(", ")", "while", "True", ":", "tstamp", "=", "os", ".", "path", ".", "getmtime", "(", "file_path", ")", "# Compare timestamps and update config if needed", "if", "tstamp", ">", "state", ".", "_tstamp", ":", "state", ".", "update_conf", "(", ")", "state", ".", "_tstamp", "=", "tstamp", "# Not too critical", "time", ".", "sleep", "(", "10", ")" ]
Convert the Ideone language list into a Python dictionary .
def _collapse_language_array ( language_array ) : language_dict = { } for language in language_array . item : key = language . key [ 0 ] value = language . value [ 0 ] language_dict [ key ] = value return language_dict
11,209
https://github.com/jschaf/ideone-api/blob/2e97767071d5be53c1d435f755b425a6dd8f2514/ideone/__init__.py#L62-L72
[ "def", "long_path_formatter", "(", "line", ",", "max_width", "=", "pd", ".", "get_option", "(", "'max_colwidth'", ")", ")", ":", "if", "len", "(", "line", ")", ">", "max_width", ":", "tokens", "=", "line", ".", "split", "(", "\".\"", ")", "trial1", "=", "\"%s...%s\"", "%", "(", "tokens", "[", "0", "]", ",", "tokens", "[", "-", "1", "]", ")", "if", "len", "(", "trial1", ")", ">", "max_width", ":", "return", "\"...%s\"", "%", "(", "tokens", "[", "-", "1", "]", "[", "-", "1", ":", "-", "(", "max_width", "-", "3", ")", "]", ")", "else", ":", "return", "trial1", "else", ":", "return", "line" ]
Translate a human readable langauge name into its Ideone integer representation .
def _translate_language_name ( self , language_name ) : languages = self . languages ( ) language_id = None # Check for exact match first including the whole version # string for ideone_index , ideone_language in languages . items ( ) : if ideone_language . lower ( ) == language_name . lower ( ) : return ideone_index # Check for a match of just the language name without any # version information simple_languages = dict ( ( k , v . split ( '(' ) [ 0 ] . strip ( ) ) for ( k , v ) in languages . items ( ) ) for ideone_index , simple_name in simple_languages . items ( ) : if simple_name . lower ( ) == language_name . lower ( ) : return ideone_index # Give up, but first find a similar name, suggest it and error # out language_choices = languages . values ( ) + simple_languages . values ( ) similar_choices = difflib . get_close_matches ( language_name , language_choices , n = 3 , cutoff = 0.3 ) # Add quotes and delimit with strings for easier to read # output similar_choices_string = ", " . join ( [ "'" + s + "'" for s in similar_choices ] ) error_string = ( "Couldn't match '%s' to an Ideone accepted language.\n" "Did you mean one of the following: %s" ) raise IdeoneError ( error_string % ( language_name , similar_choices_string ) )
11,210
https://github.com/jschaf/ideone-api/blob/2e97767071d5be53c1d435f755b425a6dd8f2514/ideone/__init__.py#L74-L137
[ "def", "loop", "(", "self", ")", ":", "while", "True", ":", "sleep", "(", "1", ")", "new_file_list", "=", "self", ".", "walk", "(", "self", ".", "file_path", ",", "{", "}", ")", "if", "new_file_list", "!=", "self", ".", "file_list", ":", "if", "self", ".", "debug", ":", "self", ".", "diff_list", "(", "new_file_list", ",", "self", ".", "file_list", ")", "self", ".", "run_tests", "(", ")", "self", ".", "file_list", "=", "new_file_list" ]
Create a submission and upload it to Ideone .
def create_submission ( self , source_code , language_name = None , language_id = None , std_input = "" , run = True , private = False ) : language_id = language_id or self . _translate_language_name ( language_name ) result = self . client . service . createSubmission ( self . user , self . password , source_code , language_id , std_input , run , private ) result_dict = Ideone . _transform_to_dict ( result ) Ideone . _handle_error ( result_dict ) return result_dict
11,211
https://github.com/jschaf/ideone-api/blob/2e97767071d5be53c1d435f755b425a6dd8f2514/ideone/__init__.py#L141-L179
[ "def", "del_properties", "(", "self", ",", "pathobj", ",", "props", ",", "recursive", ")", ":", "if", "isinstance", "(", "props", ",", "str", ")", ":", "props", "=", "(", "props", ",", ")", "url", "=", "'/'", ".", "join", "(", "[", "pathobj", ".", "drive", ",", "'api/storage'", ",", "str", "(", "pathobj", ".", "relative_to", "(", "pathobj", ".", "drive", ")", ")", ".", "strip", "(", "'/'", ")", "]", ")", "params", "=", "{", "'properties'", ":", "','", ".", "join", "(", "sorted", "(", "props", ")", ")", "}", "if", "not", "recursive", ":", "params", "[", "'recursive'", "]", "=", "'0'", "text", ",", "code", "=", "self", ".", "rest_del", "(", "url", ",", "params", "=", "params", ",", "auth", "=", "pathobj", ".", "auth", ",", "verify", "=", "pathobj", ".", "verify", ",", "cert", "=", "pathobj", ".", "cert", ")", "if", "code", "==", "404", "and", "\"Unable to find item\"", "in", "text", ":", "raise", "OSError", "(", "2", ",", "\"No such file or directory: '%s'\"", "%", "url", ")", "if", "code", "!=", "204", ":", "raise", "RuntimeError", "(", "text", ")" ]
Given the unique link of a submission returns its current status .
def submission_status ( self , link ) : result = self . client . service . getSubmissionStatus ( self . user , self . password , link ) result_dict = Ideone . _transform_to_dict ( result ) Ideone . _handle_error ( result_dict ) return result_dict
11,212
https://github.com/jschaf/ideone-api/blob/2e97767071d5be53c1d435f755b425a6dd8f2514/ideone/__init__.py#L181-L234
[ "def", "delete_classifier", "(", "self", ",", "classifier_id", ",", "*", "*", "kwargs", ")", ":", "if", "classifier_id", "is", "None", ":", "raise", "ValueError", "(", "'classifier_id must be provided'", ")", "headers", "=", "{", "}", "if", "'headers'", "in", "kwargs", ":", "headers", ".", "update", "(", "kwargs", ".", "get", "(", "'headers'", ")", ")", "sdk_headers", "=", "get_sdk_headers", "(", "'watson_vision_combined'", ",", "'V3'", ",", "'delete_classifier'", ")", "headers", ".", "update", "(", "sdk_headers", ")", "params", "=", "{", "'version'", ":", "self", ".", "version", "}", "url", "=", "'/v3/classifiers/{0}'", ".", "format", "(", "*", "self", ".", "_encode_path_vars", "(", "classifier_id", ")", ")", "response", "=", "self", ".", "request", "(", "method", "=", "'DELETE'", ",", "url", "=", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "accept_json", "=", "True", ")", "return", "response" ]
Return a dictionary of requested details about a submission with the id of link .
def submission_details ( self , link , with_source = True , with_input = True , with_output = True , with_stderr = True , with_compilation_info = True ) : result = self . client . service . getSubmissionDetails ( self . user , self . password , link , with_source , with_input , with_output , with_stderr , with_compilation_info ) result_dict = Ideone . _transform_to_dict ( result ) Ideone . _handle_error ( result_dict ) return result_dict
11,213
https://github.com/jschaf/ideone-api/blob/2e97767071d5be53c1d435f755b425a6dd8f2514/ideone/__init__.py#L236-L283
[ "def", "clean_all", "(", "self", ",", "config_file", ",", "region", "=", "None", ",", "profile_name", "=", "None", ")", ":", "logging", ".", "info", "(", "'[begin] Cleaning all provisioned artifacts'", ")", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "config", ".", "is_fresh", "(", ")", "is", "True", ":", "raise", "ValueError", "(", "\"Config is already clean.\"", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "self", ".", "_delete_group", "(", "config_file", ",", "region", "=", "region", ",", "profile_name", "=", "profile_name", ")", "self", ".", "clean_core", "(", "config_file", ",", "region", "=", "region", ")", "self", ".", "clean_devices", "(", "config_file", ",", "region", "=", "region", ")", "self", ".", "clean_file", "(", "config_file", ")", "logging", ".", "info", "(", "'[end] Cleaned all provisioned artifacts'", ")" ]
Get a list of supported languages and cache it .
def languages ( self ) : if self . _language_dict is None : result = self . client . service . getLanguages ( self . user , self . password ) result_dict = Ideone . _transform_to_dict ( result ) Ideone . _handle_error ( result_dict ) languages = result_dict [ 'languages' ] result_dict [ 'languages' ] = Ideone . _collapse_language_array ( languages ) self . _language_dict = result_dict [ 'languages' ] return self . _language_dict
11,214
https://github.com/jschaf/ideone-api/blob/2e97767071d5be53c1d435f755b425a6dd8f2514/ideone/__init__.py#L285-L309
[ "def", "object", "(", "self", ")", ":", "if", "self", ".", "type", "==", "EntryType", ".", "category", ":", "return", "self", ".", "category", "elif", "self", ".", "type", "==", "EntryType", ".", "event", ":", "return", "self", ".", "event", "elif", "self", ".", "type", "==", "EntryType", ".", "session", ":", "return", "self", ".", "session", "elif", "self", ".", "type", "==", "EntryType", ".", "contribution", ":", "return", "self", ".", "contribution", "elif", "self", ".", "type", "==", "EntryType", ".", "subcontribution", ":", "return", "self", ".", "subcontribution" ]
Returns True if the file is binary
def is_binary ( filename ) : with open ( filename , 'rb' ) as fp : data = fp . read ( 1024 ) if not data : return False if b'\0' in data : return True return False
11,215
https://github.com/dmerejkowsky/replacer/blob/8dc16f297d0ff3a6ee2fa3c0d77789a6859b0f6a/replacer.py#L55-L65
[ "def", "is_valid", "(", "self", ")", ":", "valid", "=", "super", "(", "RegistrationContactForm", ",", "self", ")", ".", "is_valid", "(", ")", "msgs", "=", "messages", ".", "get_messages", "(", "self", ".", "_request", ")", "# We only want validation messages to show up once, so pop messages that have already show up", "# before checking to see if any messages remain to be shown.", "prior_messages", "=", "self", ".", "_session", ".", "pop", "(", "'prior_messages'", ",", "[", "]", ")", "remaining_messages", "=", "[", "]", "for", "m", "in", "msgs", ":", "m_dict", "=", "{", "'message'", ":", "m", ".", "message", ",", "'level'", ":", "m", ".", "level", ",", "'extra_tags'", ":", "m", ".", "extra_tags", "}", "if", "m_dict", "not", "in", "prior_messages", ":", "remaining_messages", ".", "append", "(", "m_dict", ")", "if", "remaining_messages", ":", "self", ".", "_session", "[", "'prior_messages'", "]", "=", "remaining_messages", "self", ".", "_request", ".", "session", ".", "modified", "=", "True", "return", "False", "return", "valid" ]
Recusively go do the subdirectories of the directory calling the action on each file
def walk_files ( args , root , directory , action ) : for entry in os . listdir ( directory ) : if is_hidden ( args , entry ) : continue if is_excluded_directory ( args , entry ) : continue if is_in_default_excludes ( entry ) : continue if not is_included ( args , entry ) : continue if is_excluded ( args , entry , directory ) : continue entry = os . path . join ( directory , entry ) if os . path . isdir ( entry ) : walk_files ( args , root , entry , action ) if os . path . isfile ( entry ) : if is_binary ( entry ) : continue action ( entry )
11,216
https://github.com/dmerejkowsky/replacer/blob/8dc16f297d0ff3a6ee2fa3c0d77789a6859b0f6a/replacer.py#L110-L133
[ "def", "set_raw_holding_register", "(", "self", ",", "name", ",", "value", ")", ":", "self", ".", "_conn", ".", "write_register", "(", "unit", "=", "self", ".", "_slave", ",", "address", "=", "(", "self", ".", "_holding_regs", "[", "name", "]", "[", "'addr'", "]", ")", ",", "value", "=", "value", ")" ]
manages options when called from command line
def main ( args = None ) : parser = ArgumentParser ( usage = __usage__ ) parser . add_argument ( "--no-skip-hidden" , action = "store_false" , dest = "skip_hidden" , help = "Do not skip hidden files. " "Use this if you know what you are doing..." ) parser . add_argument ( "--include" , dest = "includes" , action = "append" , help = "Only replace in files matching theses patterns" ) parser . add_argument ( "--exclude" , dest = "excludes" , action = "append" , help = "Ignore files matching theses patterns" ) parser . add_argument ( "--backup" , action = "store_true" , dest = "backup" , help = "Create a backup for each file. " "By default, files are modified in place" ) parser . add_argument ( "--go" , action = "store_true" , dest = "go" , help = "Perform changes rather than just printing then" ) parser . add_argument ( "--dry-run" , "-n" , action = "store_false" , dest = "go" , help = "Do not change anything. This is the default" ) parser . add_argument ( "--color" , choices = [ "always" , "never" , "auto" ] , help = "When to colorize the output. " "Default: when output is a tty" ) parser . add_argument ( "--no-color" , action = "store_false" , dest = "color" , help = "Do not colorize output" ) parser . add_argument ( "--quiet" , "-q" , action = "store_true" , dest = "quiet" , help = "Do not produce any output" ) parser . add_argument ( "pattern" ) parser . add_argument ( "replacement" ) parser . add_argument ( "paths" , nargs = "*" ) parser . set_defaults ( includes = list ( ) , excludes = list ( ) , skip_hidden = True , backup = False , go = False , color = "auto" , quiet = False , ) args = parser . parse_args ( args = args ) setup_colors ( args ) repl_main ( args )
11,217
https://github.com/dmerejkowsky/replacer/blob/8dc16f297d0ff3a6ee2fa3c0d77789a6859b0f6a/replacer.py#L253-L300
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Loads this item .
def load ( self ) : if self . _loaded : return self . _loaded = True self . setChildIndicatorPolicy ( self . DontShowIndicatorWhenChildless ) if not self . isFolder ( ) : return path = self . filepath ( ) if not os . path . isdir ( path ) : path = os . path . dirname ( path ) for name in os . listdir ( path ) : # ignore 'hidden' folders if name . startswith ( '_' ) and not name . startswith ( '__' ) : continue # ignore special cases (only want modules for this) if '-' in name : continue # use the index or __init__ information if name in ( 'index.html' , '__init__.html' ) : self . _url = 'file:///%s/%s' % ( path , name ) continue # otherwise, load a childitem filepath = os . path . join ( path , name ) folder = os . path . isdir ( filepath ) XdkEntryItem ( self , filepath , folder = folder )
11,218
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/windows/xdkwindow/xdkitem.py#L122-L156
[ "def", "delete_secret_versions", "(", "self", ",", "path", ",", "versions", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "if", "not", "isinstance", "(", "versions", ",", "list", ")", "or", "len", "(", "versions", ")", "==", "0", ":", "error_msg", "=", "'argument to \"versions\" must be a list containing one or more integers, \"{versions}\" provided.'", ".", "format", "(", "versions", "=", "versions", ")", "raise", "exceptions", ".", "ParamValidationError", "(", "error_msg", ")", "params", "=", "{", "'versions'", ":", "versions", ",", "}", "api_path", "=", "'/v1/{mount_point}/delete/{path}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "path", "=", "path", ")", "return", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")" ]
Attempts to read a version attribute from the given module that could be specified via several different names and formats .
def get_version ( module ) : version_names = [ "__version__" , "get_version" , "version" ] version_names . extend ( [ name . upper ( ) for name in version_names ] ) for name in version_names : try : version = getattr ( module , name ) except AttributeError : continue if callable ( version ) : version = version ( ) try : version = "." . join ( [ str ( i ) for i in version . __iter__ ( ) ] ) except AttributeError : pass return version
11,219
https://github.com/stephenmcd/sphinx-me/blob/9f51a04d58a90834a787246ce475a564b4f9e5ee/sphinx_me.py#L66-L84
[ "def", "win32_refresh_window", "(", "cls", ")", ":", "# Get console handle", "handle", "=", "windll", ".", "kernel32", ".", "GetConsoleWindow", "(", ")", "RDW_INVALIDATE", "=", "0x0001", "windll", ".", "user32", ".", "RedrawWindow", "(", "handle", ",", "None", ",", "None", ",", "c_uint", "(", "RDW_INVALIDATE", ")", ")" ]
Runs the project s setup . py script in a process with an arg that will print out the value for a particular attribute such as author or version and returns the value .
def get_setup_attribute ( attribute , setup_path ) : args = [ "python" , setup_path , "--%s" % attribute ] return Popen ( args , stdout = PIPE ) . communicate ( ) [ 0 ] . decode ( 'utf-8' ) . strip ( )
11,220
https://github.com/stephenmcd/sphinx-me/blob/9f51a04d58a90834a787246ce475a564b4f9e5ee/sphinx_me.py#L87-L94
[ "def", "delete_group", "(", "self", ",", "group", ")", ":", "try", ":", "lgroup", "=", "self", ".", "_get_group", "(", "group", ".", "name", ")", "delete", "(", "lgroup", ",", "database", "=", "self", ".", "_database", ")", "except", "ObjectDoesNotExist", ":", "# it doesn't matter if it doesn't exist", "pass" ]
Fetch the modules
def fetch_items ( self , category , * * kwargs ) : from_date = kwargs [ 'from_date' ] logger . info ( "Fetching modules from %s" , str ( from_date ) ) from_date_ts = datetime_to_utc ( from_date ) . timestamp ( ) nmodules = 0 stop_fetching = False raw_pages = self . client . modules ( ) for raw_modules in raw_pages : modules = [ mod for mod in self . parse_json ( raw_modules ) ] for module in modules : # Check timestamps to stop fetching more modules # because modules fetched sorted by 'updated_at' # from newest to oldest. updated_at_ts = self . metadata_updated_on ( module ) if from_date_ts > updated_at_ts : stop_fetching = True break owner = module [ 'owner' ] [ 'username' ] name = module [ 'name' ] module [ 'releases' ] = self . __fetch_and_parse_releases ( owner , name ) module [ 'owner_data' ] = self . __get_or_fetch_owner ( owner ) yield module nmodules += 1 if stop_fetching : break logger . info ( "Fetch process completed: %s modules fetched" , nmodules )
11,221
https://github.com/chaoss/grimoirelab-perceval-puppet/blob/4b215df2e8045ce3d6538e532e8b5c660ebed7ea/perceval/backends/puppet/puppetforge.py#L91-L134
[ "def", "tag_pos_volume", "(", "line", ")", ":", "def", "tagger", "(", "match", ")", ":", "groups", "=", "match", ".", "groupdict", "(", ")", "try", ":", "year", "=", "match", ".", "group", "(", "'year'", ")", "except", "IndexError", ":", "# Extract year from volume name", "# which should always include the year", "g", "=", "re", ".", "search", "(", "re_pos_year_num", ",", "match", ".", "group", "(", "'volume_num'", ")", ",", "re", ".", "UNICODE", ")", "year", "=", "g", ".", "group", "(", "0", ")", "if", "year", ":", "groups", "[", "'year'", "]", "=", "' <cds.YR>(%s)</cds.YR>'", "%", "year", ".", "strip", "(", ")", ".", "strip", "(", "'()'", ")", "else", ":", "groups", "[", "'year'", "]", "=", "''", "return", "'<cds.JOURNAL>PoS</cds.JOURNAL>'", "' <cds.VOL>%(volume_name)s%(volume_num)s</cds.VOL>'", "'%(year)s'", "' <cds.PG>%(page)s</cds.PG>'", "%", "groups", "for", "p", "in", "re_pos", ":", "line", "=", "p", ".", "sub", "(", "tagger", ",", "line", ")", "return", "line" ]
Parse a Puppet forge JSON stream .
def parse_json(raw_json):
    """Parse a Puppet forge JSON stream.

    Returns the 'results' list when the payload is a paginated
    response, otherwise the decoded object itself.
    """
    parsed = json.loads(raw_json)
    if 'results' in parsed:
        return parsed['results']
    return parsed
11,222
https://github.com/chaoss/grimoirelab-perceval-puppet/blob/4b215df2e8045ce3d6538e532e8b5c660ebed7ea/perceval/backends/puppet/puppetforge.py#L185-L200
[ "async", "def", "dist", "(", "self", ",", "mesg", ")", ":", "if", "self", ".", "isfini", ":", "return", "(", ")", "ret", "=", "[", "]", "for", "func", "in", "self", ".", "_syn_funcs", ".", "get", "(", "mesg", "[", "0", "]", ",", "(", ")", ")", ":", "try", ":", "ret", ".", "append", "(", "await", "s_coro", ".", "ornot", "(", "func", ",", "mesg", ")", ")", "except", "asyncio", ".", "CancelledError", ":", "raise", "except", "Exception", ":", "logger", ".", "exception", "(", "'base %s error with mesg %s'", ",", "self", ",", "mesg", ")", "for", "func", "in", "self", ".", "_syn_links", ":", "try", ":", "ret", ".", "append", "(", "await", "func", "(", "mesg", ")", ")", "except", "asyncio", ".", "CancelledError", ":", "raise", "except", "Exception", ":", "logger", ".", "exception", "(", "'base %s error with mesg %s'", ",", "self", ",", "mesg", ")", "return", "ret" ]
Fetch modules pages .
def modules(self):
    """Yield pages of modules, sorted by latest release."""
    params = {
        self.PLIMIT: self.max_items,
        self.PSORT_BY: self.VLATEST_RELEASE,
    }
    for page in self._fetch(self.RMODULES, params):
        yield page
11,223
https://github.com/chaoss/grimoirelab-perceval-puppet/blob/4b215df2e8045ce3d6538e532e8b5c660ebed7ea/perceval/backends/puppet/puppetforge.py#L264-L275
[ "def", "removefromreadergroup", "(", "self", ",", "groupname", ")", ":", "hresult", ",", "hcontext", "=", "SCardEstablishContext", "(", "SCARD_SCOPE_USER", ")", "if", "0", "!=", "hresult", ":", "raise", "EstablishContextException", "(", "hresult", ")", "try", ":", "hresult", "=", "SCardRemoveReaderFromGroup", "(", "hcontext", ",", "self", ".", "name", ",", "groupname", ")", "if", "0", "!=", "hresult", ":", "raise", "RemoveReaderFromGroupException", "(", "hresult", ",", "self", ".", "name", ",", "groupname", ")", "finally", ":", "hresult", "=", "SCardReleaseContext", "(", "hcontext", ")", "if", "0", "!=", "hresult", ":", "raise", "ReleaseContextException", "(", "hresult", ")" ]
Fetch the releases of a module .
def releases(self, owner, module):
    """Yield pages of releases for the ``owner``-``module`` module.

    Deleted releases are included; pages are sorted by release date.
    """
    params = {
        self.PMODULE: owner + '-' + module,
        self.PLIMIT: self.max_items,
        self.PSHOW_DELETED: 'true',
        self.PSORT_BY: self.VRELEASE_DATE,
    }
    for page in self._fetch(self.RRELEASES, params):
        yield page
11,224
https://github.com/chaoss/grimoirelab-perceval-puppet/blob/4b215df2e8045ce3d6538e532e8b5c660ebed7ea/perceval/backends/puppet/puppetforge.py#L277-L290
[ "def", "_from_dict", "(", "cls", ",", "_dict", ")", ":", "args", "=", "{", "}", "if", "'matching_results'", "in", "_dict", ":", "args", "[", "'matching_results'", "]", "=", "_dict", ".", "get", "(", "'matching_results'", ")", "if", "'results'", "in", "_dict", ":", "args", "[", "'results'", "]", "=", "[", "LogQueryResponseResult", ".", "_from_dict", "(", "x", ")", "for", "x", "in", "(", "_dict", ".", "get", "(", "'results'", ")", ")", "]", "return", "cls", "(", "*", "*", "args", ")" ]
Resets the current tree to empty .
def reset_tree(self):
    """Reset the current tree to an empty, not-ready state."""
    self.tree = {
        'leaves': [],
        'levels': [],
        'is_ready': False,
    }
11,225
https://github.com/kimdhamilton/merkle-proofs/blob/77551cc65f72b50ac203f10a5069cb1a5b3ffb49/merkleproof/MerkleTree.py#L25-L32
[ "def", "delay_and_stop", "(", "duration", ",", "dll", ",", "device_number", ")", ":", "xinput", "=", "getattr", "(", "ctypes", ".", "windll", ",", "dll", ")", "time", ".", "sleep", "(", "duration", "/", "1000", ")", "xinput_set_state", "=", "xinput", ".", "XInputSetState", "xinput_set_state", ".", "argtypes", "=", "[", "ctypes", ".", "c_uint", ",", "ctypes", ".", "POINTER", "(", "XinputVibration", ")", "]", "xinput_set_state", ".", "restype", "=", "ctypes", ".", "c_uint", "vibration", "=", "XinputVibration", "(", "0", ",", "0", ")", "xinput_set_state", "(", "device_number", ",", "ctypes", ".", "byref", "(", "vibration", ")", ")" ]
Add leaves to the tree .
def add_leaves(self, values_array, do_hash=False):
    """Add each value in ``values_array`` as a leaf of the tree.

    Marks the tree as not ready; call ``make_tree`` afterwards to
    rebuild the levels.

    :param values_array: iterable of leaf values
    :param do_hash: when True, each value is hashed before storage
    """
    self.tree['is_ready'] = False
    # Use a plain loop: the original built a throwaway list
    # comprehension purely for its side effects.
    for value in values_array:
        self._add_leaf(value, do_hash)
11,226
https://github.com/kimdhamilton/merkle-proofs/blob/77551cc65f72b50ac203f10a5069cb1a5b3ffb49/merkleproof/MerkleTree.py#L43-L52
[ "def", "load_draco", "(", "file_obj", ",", "*", "*", "kwargs", ")", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.drc'", ")", "as", "temp_drc", ":", "temp_drc", ".", "write", "(", "file_obj", ".", "read", "(", ")", ")", "temp_drc", ".", "flush", "(", ")", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.ply'", ")", "as", "temp_ply", ":", "subprocess", ".", "check_output", "(", "[", "draco_decoder", ",", "'-i'", ",", "temp_drc", ".", "name", ",", "'-o'", ",", "temp_ply", ".", "name", "]", ")", "temp_ply", ".", "seek", "(", "0", ")", "kwargs", "=", "load_ply", "(", "temp_ply", ")", "return", "kwargs" ]
Generates the merkle tree .
def make_tree(self):
    """Generate the merkle tree from the leaves added so far."""
    self.tree['is_ready'] = False
    leaf_count = len(self.tree['leaves'])
    # Skip level construction entirely when no leaves were added.
    if leaf_count > 0:
        self._unshift(self.tree['levels'], self.tree['leaves'])
        while len(self.tree['levels'][0]) > 1:
            self._unshift(self.tree['levels'], self._calculate_next_level())
    # NOTE(review): flattened source is ambiguous about whether this
    # flag is set for an empty tree too — confirm against upstream.
    self.tree['is_ready'] = True
11,227
https://github.com/kimdhamilton/merkle-proofs/blob/77551cc65f72b50ac203f10a5069cb1a5b3ffb49/merkleproof/MerkleTree.py#L80-L91
[ "def", "_ensure_connection", "(", "self", ")", ":", "conn", "=", "self", ".", "connect", "(", ")", "if", "conn", ".", "recycle", "and", "conn", ".", "recycle", "<", "time", ".", "time", "(", ")", ":", "logger", ".", "debug", "(", "'Client session expired after %is. Recycling.'", ",", "self", ".", "_recycle", ")", "self", ".", "close", "(", ")", "conn", "=", "self", ".", "connect", "(", ")", "return", "conn" ]
Converts duration to timedelta
def duration_to_timedelta(obj):
    """Convert a duration string (e.g. '1h30m') to a ``timedelta``.

    Components missing from the match default to 0.
    """
    found = DURATION_PATTERN.search(obj)
    fields = found.groupdict(default="0")
    return timedelta(**{key: int(value) for key, value in fields.items()})
11,228
https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/common/util.py#L39-L48
[ "def", "main", "(", ")", ":", "fmt", "=", "'svg'", "title", "=", "\"\"", "if", "'-h'", "in", "sys", ".", "argv", ":", "print", "(", "main", ".", "__doc__", ")", "sys", ".", "exit", "(", ")", "if", "'-f'", "in", "sys", ".", "argv", ":", "ind", "=", "sys", ".", "argv", ".", "index", "(", "'-f'", ")", "file", "=", "sys", ".", "argv", "[", "ind", "+", "1", "]", "X", "=", "numpy", ".", "loadtxt", "(", "file", ")", "file", "=", "sys", ".", "argv", "[", "ind", "+", "2", "]", "X2", "=", "numpy", ".", "loadtxt", "(", "file", ")", "# else:", "# X=numpy.loadtxt(sys.stdin,dtype=numpy.float)", "else", ":", "print", "(", "'-f option required'", ")", "print", "(", "main", ".", "__doc__", ")", "sys", ".", "exit", "(", ")", "if", "'-fmt'", "in", "sys", ".", "argv", ":", "ind", "=", "sys", ".", "argv", ".", "index", "(", "'-fmt'", ")", "fmt", "=", "sys", ".", "argv", "[", "ind", "+", "1", "]", "if", "'-t'", "in", "sys", ".", "argv", ":", "ind", "=", "sys", ".", "argv", ".", "index", "(", "'-t'", ")", "title", "=", "sys", ".", "argv", "[", "ind", "+", "1", "]", "CDF", "=", "{", "'X'", ":", "1", "}", "pmagplotlib", ".", "plot_init", "(", "CDF", "[", "'X'", "]", ",", "5", ",", "5", ")", "pmagplotlib", ".", "plot_cdf", "(", "CDF", "[", "'X'", "]", ",", "X", ",", "''", ",", "'r'", ",", "''", ")", "pmagplotlib", ".", "plot_cdf", "(", "CDF", "[", "'X'", "]", ",", "X2", ",", "title", ",", "'b'", ",", "''", ")", "D", ",", "p", "=", "scipy", ".", "stats", ".", "ks_2samp", "(", "X", ",", "X2", ")", "if", "p", ">=", ".05", ":", "print", "(", "D", ",", "p", ",", "' not rejected at 95%'", ")", "else", ":", "print", "(", "D", ",", "p", ",", "' rejected at 95%'", ")", "pmagplotlib", ".", "draw_figs", "(", "CDF", ")", "ans", "=", "input", "(", "'S[a]ve plot, <Return> to quit '", ")", "if", "ans", "==", "'a'", ":", "files", "=", "{", "'X'", ":", "'CDF_.'", "+", "fmt", "}", "pmagplotlib", ".", "save_plots", "(", "CDF", ",", "files", ")" ]
Converts timedelta to duration
def timedelta_to_duration(obj):
    """Convert a ``timedelta`` to a duration string such as '1d2h3m4s'.

    Zero components are omitted, except that a zero delta yields '0s'.
    """
    minutes = hours = days = 0
    seconds = int(obj.total_seconds())
    # Cascade the carry only past each unit's limit, exactly like the
    # original hand-rolled arithmetic.
    if seconds > 59:
        minutes, seconds = divmod(seconds, 60)
    if minutes > 59:
        hours, minutes = divmod(minutes, 60)
    if hours > 23:
        days, hours = divmod(hours, 24)

    parts = []
    if days:
        parts.append('%sd' % days)
    if hours:
        parts.append('%sh' % hours)
    if minutes:
        parts.append('%sm' % minutes)
    if seconds or not parts:
        parts.append('%ss' % seconds)
    return "".join(parts)
11,229
https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/common/util.py#L51-L77
[ "def", "write_publication", "(", "self", ",", "values", ")", ":", "con", "=", "self", ".", "connection", "or", "self", ".", "_connect", "(", ")", "self", ".", "_initialize", "(", "con", ")", "cur", "=", "con", ".", "cursor", "(", ")", "values", "=", "(", "values", "[", "'pub_id'", "]", ",", "values", "[", "'title'", "]", ",", "json", ".", "dumps", "(", "values", "[", "'authors'", "]", ")", ",", "values", "[", "'journal'", "]", ",", "values", "[", "'volume'", "]", ",", "values", "[", "'number'", "]", ",", "values", "[", "'pages'", "]", ",", "values", "[", "'year'", "]", ",", "values", "[", "'publisher'", "]", ",", "values", "[", "'doi'", "]", ",", "json", ".", "dumps", "(", "values", "[", "'tags'", "]", ")", ")", "q", "=", "self", ".", "default", "+", "','", "+", "', '", ".", "join", "(", "'?'", "*", "len", "(", "values", ")", ")", "cur", ".", "execute", "(", "'INSERT OR IGNORE INTO publication VALUES ({})'", ".", "format", "(", "q", ")", ",", "values", ")", "pid", "=", "self", ".", "get_last_id", "(", "cur", ",", "table", "=", "'publication'", ")", "if", "self", ".", "connection", "is", "None", ":", "con", ".", "commit", "(", ")", "con", ".", "close", "(", ")", "return", "pid" ]
Create an iKuai DM message packet
def create_dm_pkg(secret, username):
    """Build an iKuai DM packet for ``username``.

    Both fields are encoded and zero-padded to 32 bytes before packing.
    """
    secret = tools.EncodeString(secret)
    username = tools.EncodeString(username)
    layout = '>HHHH32sHH32s'
    fields = (
        IK_RAD_PKG_VER,
        IK_RAD_PKG_AUTH,
        IK_RAD_PKG_USR_PWD_TAG,
        len(secret),
        secret.ljust(32, '\x00'),
        IK_RAD_PKG_CMD_ARGS_TAG,
        len(username),
        username.ljust(32, '\x00'),
    )
    return struct.pack(layout, *fields)
11,230
https://github.com/talkincode/txradius/blob/b86fdbc9be41183680b82b07d3a8e8ea10926e01/txradius/ext/ikuai.py#L20-L35
[ "async", "def", "open", "(", "self", ")", "->", "None", ":", "LOGGER", ".", "debug", "(", "'StorageRecordSearch.open >>>'", ")", "if", "self", ".", "opened", ":", "LOGGER", ".", "debug", "(", "'StorageRecordSearch.open <!< Search is already opened'", ")", "raise", "BadSearch", "(", "'Search is already opened'", ")", "if", "not", "self", ".", "_wallet", ".", "opened", ":", "LOGGER", ".", "debug", "(", "'StorageRecordSearch.open <!< Wallet %s is closed'", ",", "self", ".", "_wallet", ".", "name", ")", "raise", "WalletState", "(", "'Wallet {} is closed'", ".", "format", "(", "self", ".", "_wallet", ".", "name", ")", ")", "self", ".", "_handle", "=", "await", "non_secrets", ".", "open_wallet_search", "(", "self", ".", "_wallet", ".", "handle", ",", "self", ".", "_type", ",", "self", ".", "_query_json", ",", "StorageRecordSearch", ".", "OPTIONS_JSON", ")", "LOGGER", ".", "debug", "(", "'StorageRecordSearch.open <<<'", ")" ]
Clears the file of all the breakpoints .
def clearBreakpoints(self):
    """Remove every breakpoint marker from the file.

    Emits ``breakpointsChanged`` unless signals are blocked.
    """
    self.markerDeleteAll(self._breakpointMarker)
    if not self.signalsBlocked():
        self.breakpointsChanged.emit()
11,231
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xscintillaedit/xscintillaedit.py#L127-L134
[ "def", "_get_port_speed_price_id", "(", "items", ",", "port_speed", ",", "no_public", ",", "location", ")", ":", "for", "item", "in", "items", ":", "if", "utils", ".", "lookup", "(", "item", ",", "'itemCategory'", ",", "'categoryCode'", ")", "!=", "'port_speed'", ":", "continue", "# Check for correct capacity and if the item matches private only", "if", "any", "(", "[", "int", "(", "utils", ".", "lookup", "(", "item", ",", "'capacity'", ")", ")", "!=", "port_speed", ",", "_is_private_port_speed_item", "(", "item", ")", "!=", "no_public", ",", "not", "_is_bonded", "(", "item", ")", "]", ")", ":", "continue", "for", "price", "in", "item", "[", "'prices'", "]", ":", "if", "not", "_matches_location", "(", "price", ",", "location", ")", ":", "continue", "return", "price", "[", "'id'", "]", "raise", "SoftLayer", ".", "SoftLayerError", "(", "\"Could not find valid price for port speed: '%s'\"", "%", "port_speed", ")" ]
Unindents the current selected text .
def unindentSelection(self):
    """Unindent every line covered by the current selection."""
    selection = self.getSelection()
    first, last = selection[0], selection[2]
    for line in range(first, last + 1):
        self.unindent(line)
11,232
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xscintillaedit/xscintillaedit.py#L742-L749
[ "def", "calibrate_data", "(", "params", ",", "raw_data", ",", "calib_data", ")", ":", "start", "=", "calib_data", ".", "before", "(", "datetime", ".", "max", ")", "if", "start", "is", "None", ":", "start", "=", "datetime", ".", "min", "start", "=", "raw_data", ".", "after", "(", "start", "+", "SECOND", ")", "if", "start", "is", "None", ":", "return", "start", "del", "calib_data", "[", "start", ":", "]", "calibrator", "=", "Calib", "(", "params", ",", "raw_data", ")", "def", "calibgen", "(", "inputdata", ")", ":", "\"\"\"Internal generator function\"\"\"", "count", "=", "0", "for", "data", "in", "inputdata", ":", "idx", "=", "data", "[", "'idx'", "]", "count", "+=", "1", "if", "count", "%", "10000", "==", "0", ":", "logger", ".", "info", "(", "\"calib: %s\"", ",", "idx", ".", "isoformat", "(", "' '", ")", ")", "elif", "count", "%", "500", "==", "0", ":", "logger", ".", "debug", "(", "\"calib: %s\"", ",", "idx", ".", "isoformat", "(", "' '", ")", ")", "for", "key", "in", "(", "'rain'", ",", "'abs_pressure'", ",", "'temp_in'", ")", ":", "if", "data", "[", "key", "]", "is", "None", ":", "logger", ".", "error", "(", "'Ignoring invalid data at %s'", ",", "idx", ".", "isoformat", "(", "' '", ")", ")", "break", "else", ":", "yield", "calibrator", ".", "calib", "(", "data", ")", "calib_data", ".", "update", "(", "calibgen", "(", "raw_data", "[", "start", ":", "]", ")", ")", "return", "start" ]
Removes punctuation symbols from a string .
def _removePunctuation(text_string):
    """Remove punctuation symbols (from ``_punctuation``) from a string."""
    try:
        # Python 2 signature: translate(table, deletechars)
        return text_string.translate(None, _punctuation)
    except TypeError:
        # Python 3 signature: translate(mapping)
        return text_string.translate(str.maketrans('', '', _punctuation))
11,233
https://github.com/starling-lab/rnlp/blob/72054cc2c0cbaea1d281bf3d56b271d4da29fc4a/rnlp/textprocessing.py#L49-L67
[ "def", "_from_dict", "(", "cls", ",", "_dict", ")", ":", "args", "=", "{", "}", "if", "'matching_results'", "in", "_dict", ":", "args", "[", "'matching_results'", "]", "=", "_dict", ".", "get", "(", "'matching_results'", ")", "if", "'results'", "in", "_dict", ":", "args", "[", "'results'", "]", "=", "[", "LogQueryResponseResult", ".", "_from_dict", "(", "x", ")", "for", "x", "in", "(", "_dict", ".", "get", "(", "'results'", ")", ")", "]", "return", "cls", "(", "*", "*", "args", ")" ]
Removes stopwords contained in a list of words .
def _removeStopwords(text_list):
    """Return ``text_list`` without words in ``_stopwords`` (case-insensitive)."""
    return [word for word in text_list if word.lower() not in _stopwords]
11,234
https://github.com/starling-lab/rnlp/blob/72054cc2c0cbaea1d281bf3d56b271d4da29fc4a/rnlp/textprocessing.py#L70-L87
[ "def", "cudnnSetStream", "(", "handle", ",", "id", ")", ":", "status", "=", "_libcudnn", ".", "cudnnSetStream", "(", "handle", ",", "id", ")", "cudnnCheckStatus", "(", "status", ")" ]
Get blocks of n sentences together .
def getBlocks(sentences, n):
    """Split ``sentences`` into consecutive blocks of at most ``n`` items."""
    return [sentences[start:start + n] for start in range(0, len(sentences), n)]
11,235
https://github.com/starling-lab/rnlp/blob/72054cc2c0cbaea1d281bf3d56b271d4da29fc4a/rnlp/textprocessing.py#L90-L120
[ "def", "get_pore_surface_parameters", "(", "surface_area", ")", ":", "PoreSurfaceParameters", "=", "DataFactory", "(", "'phtools.surface'", ")", "d", "=", "{", "'accessible_surface_area'", ":", "surface_area", ".", "get_dict", "(", ")", "[", "'ASA_A^2'", "]", ",", "'target_volume'", ":", "40e3", ",", "'sampling_method'", ":", "'random'", ",", "}", "return", "PoreSurfaceParameters", "(", "dict", "=", "d", ")" ]
Destroys the locked view from this widget .
def __destroyLockedView(self):
    """Close and schedule deletion of the locked view, if one exists."""
    if not self._lockedView:
        return
    self._lockedView.close()
    self._lockedView.deleteLater()
    self._lockedView = None
11,236
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xtreewidget/xtreewidget.py#L185-L192
[ "def", "_domain_event_tunable_cb", "(", "conn", ",", "domain", ",", "params", ",", "opaque", ")", ":", "_salt_send_domain_event", "(", "opaque", ",", "conn", ",", "domain", ",", "opaque", "[", "'event'", "]", ",", "{", "'params'", ":", "params", "}", ")" ]
Removes all the items from this tree widget . This will go through and also destroy any XTreeWidgetItems prior to the model clearing its references .
def clear(self):
    """Remove all items from this tree widget.

    XTreeWidgetItem instances are explicitly destroyed first so the
    model does not keep dangling references when it clears.
    """
    for node in self.traverseItems():
        if isinstance(node, XTreeWidgetItem):
            node.destroy()
    super(XTreeWidget, self).clear()
11,237
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xtreewidget/xtreewidget.py#L401-L412
[ "def", "get_enroll", "(", "self", ")", ":", "devices", "=", "[", "DeviceRegistration", ".", "wrap", "(", "device", ")", "for", "device", "in", "self", ".", "__get_u2f_devices", "(", ")", "]", "enroll", "=", "start_register", "(", "self", ".", "__appid", ",", "devices", ")", "enroll", "[", "'status'", "]", "=", "'ok'", "session", "[", "'_u2f_enroll_'", "]", "=", "enroll", ".", "json", "return", "enroll" ]
Prompts the user to export the information for this tree based on the available exporters .
def exportAs(self, action):
    """Prompt for a filename and export this tree via the exporter tied to ``action``.

    Returns True on a successful export, False otherwise.
    """
    plugin = self.exporter(unwrapVariant(action.data()))
    if not plugin:
        return False

    ftypes = '{0} (*{1});;All Files (*.*)'.format(plugin.name(), plugin.filetype())
    filename = QtGui.QFileDialog.getSaveFileName(self.window(),
                                                 'Export Data',
                                                 '',
                                                 ftypes)
    # Some bindings return (filename, selected_filter) instead of a string.
    if type(filename) == tuple:
        filename = filename[0]

    if not filename:
        return False
    return self.export(nativestring(filename), exporter=plugin)
11,238
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xtreewidget/xtreewidget.py#L740-L761
[ "def", "from_non_aligned_residue_IDs", "(", "Chain", ",", "StartResidueID", ",", "EndResidueID", ",", "Sequence", "=", "None", ")", ":", "return", "PDBSection", "(", "Chain", ",", "PDB", ".", "ResidueID2String", "(", "StartResidueID", ")", ",", "PDB", ".", "ResidueID2String", "(", "EndResidueID", ")", ",", "Sequence", "=", "Sequence", ")" ]
Hides the current column set by the header index .
def headerHideColumn(self):
    """Hide the column at the current header index.

    Keeps at least one column visible by re-showing column 0 when the
    hide would leave nothing on screen.
    """
    self.setColumnHidden(self._headerIndex, True)

    if all(self.isColumnHidden(col) for col in range(self.columnCount())):
        self.setColumnHidden(0, False)
11,239
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xtreewidget/xtreewidget.py#L967-L981
[ "def", "_watch_progress", "(", "handler", ")", ":", "with", "_tmpdir_scope", "(", ")", "as", "tmpdir", ":", "socket_filename", "=", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "'sock'", ")", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_UNIX", ",", "socket", ".", "SOCK_STREAM", ")", "with", "contextlib", ".", "closing", "(", "sock", ")", ":", "sock", ".", "bind", "(", "socket_filename", ")", "sock", ".", "listen", "(", "1", ")", "child", "=", "gevent", ".", "spawn", "(", "_do_watch_progress", ",", "socket_filename", ",", "sock", ",", "handler", ")", "try", ":", "yield", "socket_filename", "except", ":", "gevent", ".", "kill", "(", "child", ")", "raise" ]
Sorts the column at the current header index by ascending order .
def headerSortAscending(self):
    """Enable sorting and sort the header-index column in ascending order."""
    self.setSortingEnabled(True)
    self.sortByColumn(self._headerIndex, QtCore.Qt.AscendingOrder)
11,240
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xtreewidget/xtreewidget.py#L1000-L1005
[ "def", "shutdown", "(", "self", ")", ":", "try", ":", "self", ".", "_sessions", "=", "dict", "(", ")", "self", ".", "_subscriptions", "=", "dict", "(", ")", "self", ".", "_retained_messages", "=", "dict", "(", ")", "self", ".", "transitions", ".", "shutdown", "(", ")", "except", "(", "MachineError", ",", "ValueError", ")", "as", "exc", ":", "# Backwards compat: MachineError is raised by transitions < 0.5.0.", "self", ".", "logger", ".", "debug", "(", "\"Invalid method call at this moment: %s\"", "%", "exc", ")", "raise", "BrokerException", "(", "\"Broker instance can't be stopped: %s\"", "%", "exc", ")", "# Fire broker_shutdown event to plugins", "yield", "from", "self", ".", "plugins_manager", ".", "fire_event", "(", "EVENT_BROKER_PRE_SHUTDOWN", ")", "# Stop broadcast loop", "if", "self", ".", "_broadcast_task", ":", "self", ".", "_broadcast_task", ".", "cancel", "(", ")", "if", "self", ".", "_broadcast_queue", ".", "qsize", "(", ")", ">", "0", ":", "self", ".", "logger", ".", "warning", "(", "\"%d messages not broadcasted\"", "%", "self", ".", "_broadcast_queue", ".", "qsize", "(", ")", ")", "for", "listener_name", "in", "self", ".", "_servers", ":", "server", "=", "self", ".", "_servers", "[", "listener_name", "]", "yield", "from", "server", ".", "close_instance", "(", ")", "self", ".", "logger", ".", "debug", "(", "\"Broker closing\"", ")", "self", ".", "logger", ".", "info", "(", "\"Broker closed\"", ")", "yield", "from", "self", ".", "plugins_manager", ".", "fire_event", "(", "EVENT_BROKER_POST_SHUTDOWN", ")", "self", ".", "transitions", ".", "stopping_success", "(", ")" ]
Sorts the column at the current header index by descending order .
def headerSortDescending(self):
    """Enable sorting and sort the header-index column in descending order."""
    self.setSortingEnabled(True)
    self.sortByColumn(self._headerIndex, QtCore.Qt.DescendingOrder)
11,241
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xtreewidget/xtreewidget.py#L1007-L1012
[ "def", "merged", "(", "self", ")", ":", "stats", "=", "{", "}", "for", "topic", "in", "self", ".", "client", ".", "topics", "(", ")", "[", "'topics'", "]", ":", "for", "producer", "in", "self", ".", "client", ".", "lookup", "(", "topic", ")", "[", "'producers'", "]", ":", "hostname", "=", "producer", "[", "'broadcast_address'", "]", "port", "=", "producer", "[", "'http_port'", "]", "host", "=", "'%s_%s'", "%", "(", "hostname", ",", "port", ")", "stats", "[", "host", "]", "=", "nsqd", ".", "Client", "(", "'http://%s:%s/'", "%", "(", "hostname", ",", "port", ")", ")", ".", "clean_stats", "(", ")", "return", "stats" ]
Sets the palette highlighting for this tree widget to use a darker version of the alternate color vs . the standard highlighting .
def highlightByAlternate(self):
    """Highlight selections with a darkened alternate-base color.

    Applies a modified application palette to this tree widget.
    """
    palette = QtGui.QApplication.palette()
    palette.setColor(palette.HighlightedText, palette.color(palette.Text))
    base = palette.color(palette.AlternateBase)
    palette.setColor(palette.Highlight, base.darker(110))
    self.setPalette(palette)
11,242
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xtreewidget/xtreewidget.py#L1028-L1038
[ "def", "clean", "(", "self", ",", "x", ")", ":", "return", "x", "[", "~", "np", ".", "any", "(", "np", ".", "isnan", "(", "x", ")", "|", "np", ".", "isinf", "(", "x", ")", ",", "axis", "=", "1", ")", "]" ]
Resizes the columns to the contents based on the user preferences .
def smartResizeColumnsToContents(self):
    """Resize columns to their contents without letting any column shrink.

    Signals and repaints are suppressed while the header recalculates.
    """
    self.blockSignals(True)
    self.setUpdatesEnabled(False)

    header = self.header()
    header.blockSignals(True)

    columns = range(self.columnCount())
    old_widths = [self.columnWidth(col) for col in columns]
    header.resizeSections(header.ResizeToContents)

    # Restore any column that the resize made narrower than before.
    for col in columns:
        if self.columnWidth(col) < old_widths[col]:
            self.setColumnWidth(col, old_widths[col])

    header.blockSignals(False)
    self.setUpdatesEnabled(True)
    self.blockSignals(False)
11,243
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xtreewidget/xtreewidget.py#L1985-L2007
[ "def", "delete_attachments", "(", "self", ",", "volumeID", ",", "attachmentsID", ")", ":", "log", ".", "debug", "(", "\"deleting attachments from volume '{}': {}\"", ".", "format", "(", "volumeID", ",", "attachmentsID", ")", ")", "rawVolume", "=", "self", ".", "_req_raw_volume", "(", "volumeID", ")", "insID", "=", "[", "a", "[", "'id'", "]", "for", "a", "in", "rawVolume", "[", "'_source'", "]", "[", "'_attachments'", "]", "]", "# check that all requested file are present", "for", "id", "in", "attachmentsID", ":", "if", "id", "not", "in", "insID", ":", "raise", "NotFoundException", "(", "\"could not found attachment '{}' of the volume '{}'\"", ".", "format", "(", "id", ",", "volumeID", ")", ")", "for", "index", ",", "id", "in", "enumerate", "(", "attachmentsID", ")", ":", "rawVolume", "[", "'_source'", "]", "[", "'_attachments'", "]", ".", "pop", "(", "insID", ".", "index", "(", "id", ")", ")", "self", ".", "_db", ".", "modify_book", "(", "volumeID", ",", "rawVolume", "[", "'_source'", "]", ",", "version", "=", "rawVolume", "[", "'_version'", "]", ")" ]
Run your app in gevent . spawn run simple loop if start == True
def gevent_run(app, monkey_patch=True, start=True, debug=False, **kwargs):  # pragma: no cover
    """Run ``app`` inside ``gevent.spawn``.

    When ``monkey_patch`` is set, gevent monkey patching is applied
    first. When ``start`` is True, block in a simple sleep loop until
    the app reports it has stopped.
    """
    if monkey_patch:
        from gevent import monkey
        monkey.patch_all()

    import gevent

    gevent.spawn(app.run, debug=debug, **kwargs)

    if start:
        while not app.stopped:
            gevent.sleep(0.1)
11,244
https://github.com/viatoriche/microservices/blob/3510563edd15dc6131b8a948d6062856cd904ac7/microservices/queues/runners.py#L1-L22
[ "def", "parse_torrent_properties", "(", "table_datas", ")", ":", "output", "=", "{", "'category'", ":", "table_datas", "[", "0", "]", ".", "text", ",", "'subcategory'", ":", "None", ",", "'quality'", ":", "None", ",", "'language'", ":", "None", "}", "for", "i", "in", "range", "(", "1", ",", "len", "(", "table_datas", ")", ")", ":", "td", "=", "table_datas", "[", "i", "]", "url", "=", "td", ".", "get", "(", "'href'", ")", "params", "=", "Parser", ".", "get_params", "(", "url", ")", "if", "Parser", ".", "is_subcategory", "(", "params", ")", "and", "not", "output", "[", "'subcategory'", "]", ":", "output", "[", "'subcategory'", "]", "=", "td", ".", "text", "elif", "Parser", ".", "is_quality", "(", "params", ")", "and", "not", "output", "[", "'quality'", "]", ":", "output", "[", "'quality'", "]", "=", "td", ".", "text", "elif", "Parser", ".", "is_language", "(", "params", ")", "and", "not", "output", "[", "'language'", "]", ":", "output", "[", "'language'", "]", "=", "td", ".", "text", "return", "output" ]
Recursive dict update
def dict_update(d, u):
    """Recursively merge mapping ``u`` into dict ``d`` and return ``d``.

    Nested mappings are merged key by key; any other value overwrites
    the existing entry.

    :param d: destination dict, mutated in place
    :param u: source mapping
    :returns: ``d``
    """
    # `collections.Mapping` was removed in Python 3.10 and
    # `six.iteritems` is unnecessary on Python 3 — use the stdlib ABC
    # and plain `.items()` instead.
    from collections.abc import Mapping

    for key, value in u.items():
        if isinstance(value, Mapping):
            d[key] = dict_update(d.get(key, {}), value)
        else:
            d[key] = value
    return d
11,245
https://github.com/viatoriche/microservices/blob/3510563edd15dc6131b8a948d6062856cd904ac7/microservices/utils/__init__.py#L39-L52
[ "def", "ensure_secret", "(", ")", ":", "home_dir", "=", "os", ".", "environ", "[", "'HOME'", "]", "file_name", "=", "home_dir", "+", "\"/.ipcamweb\"", "if", "os", ".", "path", ".", "exists", "(", "file_name", ")", ":", "with", "open", "(", "file_name", ",", "\"r\"", ")", "as", "s_file", ":", "secret", "=", "s_file", ".", "readline", "(", ")", "else", ":", "secret", "=", "os", ".", "urandom", "(", "24", ")", "with", "open", "(", "file_name", ",", "\"w\"", ")", "as", "s_file", ":", "secret", "=", "s_file", ".", "write", "(", "secret", "+", "\"\\n\"", ")", "return", "secret" ]
Retrieves the schema with the given schema_id from the registry and returns it as a dict .
def get_schema(self, schema_id):
    """Fetch the schema with ``schema_id`` from the registry as a dict."""
    response = requests.get(self._url('/schemas/ids/{}', schema_id))
    raise_if_failed(response)
    return json.loads(response.json()['schema'])
11,246
https://github.com/gamechanger/confluent_schema_registry_client/blob/ac9196e366724eeb2f19f1a169fd2f9a0c8d68ae/confluent_schema_registry_client/__init__.py#L51-L58
[ "def", "pan", "(", "self", ",", "value", ")", ":", "assert", "len", "(", "value", ")", "==", "2", "self", ".", "_pan", "[", ":", "]", "=", "value", "self", ".", "_constrain_pan", "(", ")", "self", ".", "update", "(", ")" ]
Returns the list of subject names present in the schema registry .
def get_subjects(self):
    """Return the list of subject names present in the schema registry."""
    response = requests.get(self._url('/subjects'))
    raise_if_failed(response)
    return response.json()
11,247
https://github.com/gamechanger/confluent_schema_registry_client/blob/ac9196e366724eeb2f19f1a169fd2f9a0c8d68ae/confluent_schema_registry_client/__init__.py#L60-L66
[ "def", "disconnect_container_from_network", "(", "self", ",", "container", ",", "net_id", ",", "force", "=", "False", ")", ":", "data", "=", "{", "\"Container\"", ":", "container", "}", "if", "force", ":", "if", "version_lt", "(", "self", ".", "_version", ",", "'1.22'", ")", ":", "raise", "InvalidVersion", "(", "'Forced disconnect was introduced in API 1.22'", ")", "data", "[", "'Force'", "]", "=", "force", "url", "=", "self", ".", "_url", "(", "\"/networks/{0}/disconnect\"", ",", "net_id", ")", "res", "=", "self", ".", "_post_json", "(", "url", ",", "data", "=", "data", ")", "self", ".", "_raise_for_status", "(", "res", ")" ]
Return the list of schema version ids which have been registered under the given subject .
def get_subject_version_ids(self, subject):
    """Return the schema version ids registered under ``subject``."""
    response = requests.get(self._url('/subjects/{}/versions', subject))
    raise_if_failed(response)
    return response.json()
11,248
https://github.com/gamechanger/confluent_schema_registry_client/blob/ac9196e366724eeb2f19f1a169fd2f9a0c8d68ae/confluent_schema_registry_client/__init__.py#L68-L75
[ "def", "GetStatus", "(", "self", ",", "Channel", ")", ":", "try", ":", "res", "=", "self", ".", "__m_dllBasic", ".", "CAN_GetStatus", "(", "Channel", ")", "return", "TPCANStatus", "(", "res", ")", "except", ":", "logger", ".", "error", "(", "\"Exception on PCANBasic.GetStatus\"", ")", "raise" ]
Retrieves the schema registered under the given subject with the given version id . Returns the schema as a dict .
def get_subject_version(self, subject, version_id):
    """Return, as a dict, the schema registered under ``subject`` at ``version_id``."""
    response = requests.get(self._url('/subjects/{}/versions/{}', subject, version_id))
    raise_if_failed(response)
    return json.loads(response.json()['schema'])
11,249
https://github.com/gamechanger/confluent_schema_registry_client/blob/ac9196e366724eeb2f19f1a169fd2f9a0c8d68ae/confluent_schema_registry_client/__init__.py#L77-L84
[ "def", "_get_and_set_force_cache_miss", "(", "request", ")", ":", "if", "not", "(", "request", ".", "user", "and", "request", ".", "user", ".", "is_active", "and", "request", ".", "user", ".", "is_staff", ")", ":", "force_cache_miss", "=", "False", "else", ":", "force_cache_miss", "=", "request", ".", "GET", ".", "get", "(", "FORCE_CACHE_MISS_PARAM", ",", "'false'", ")", ".", "lower", "(", ")", "==", "'true'", "DEFAULT_REQUEST_CACHE", ".", "set", "(", "SHOULD_FORCE_CACHE_MISS_KEY", ",", "force_cache_miss", ")" ]
Returns True if the given schema is already registered under the given subject .
def schema_is_registered_for_subject ( self , subject , schema ) : data = json . dumps ( { 'schema' : json . dumps ( schema ) } ) res = requests . post ( self . _url ( '/subjects/{}' , subject ) , data = data , headers = HEADERS ) if res . status_code == 404 : return False raise_if_failed ( res ) return True
11,250
https://github.com/gamechanger/confluent_schema_registry_client/blob/ac9196e366724eeb2f19f1a169fd2f9a0c8d68ae/confluent_schema_registry_client/__init__.py#L115-L125
[ "def", "convert_convolution", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "kernel_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"kernel\"", ")", ")", "stride_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"stride\"", ",", "[", "1", ",", "1", "]", ")", ")", "pad_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"pad\"", ",", "[", "0", ",", "0", "]", ")", ")", "num_group", "=", "int", "(", "attrs", ".", "get", "(", "\"num_group\"", ",", "1", ")", ")", "dilations", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"dilate\"", ",", "[", "1", ",", "1", "]", ")", ")", "pad_dims", "=", "pad_dims", "+", "pad_dims", "conv_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Conv\"", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "kernel_shape", "=", "kernel_dims", ",", "strides", "=", "stride_dims", ",", "dilations", "=", "dilations", ",", "pads", "=", "pad_dims", ",", "group", "=", "num_group", ",", "name", "=", "name", ")", "return", "[", "conv_node", "]" ]
Gets the global compatibility level .
def get_global_compatibility_level ( self ) : res = requests . get ( self . _url ( '/config' ) , headers = HEADERS ) raise_if_failed ( res ) return res . json ( ) [ 'compatibility' ]
11,251
https://github.com/gamechanger/confluent_schema_registry_client/blob/ac9196e366724eeb2f19f1a169fd2f9a0c8d68ae/confluent_schema_registry_client/__init__.py#L152-L158
[ "def", "__get_value", "(", "self", ",", "bundleId", ",", "languageId", ",", "resourceKey", ",", "fallback", "=", "False", ")", ":", "resourceEntryData", "=", "self", ".", "__get_resource_entry_data", "(", "bundleId", "=", "bundleId", ",", "languageId", "=", "languageId", ",", "resourceKey", "=", "resourceKey", ",", "fallback", "=", "fallback", ")", "if", "not", "resourceEntryData", ":", "return", "None", "value", "=", "resourceEntryData", ".", "get", "(", "self", ".", "__RESPONSE_TRANSLATION_KEY", ")", "return", "value" ]
Sets the compatibility level for the given subject .
def set_subject_compatibility_level ( self , subject , level ) : res = requests . put ( self . _url ( '/config/{}' , subject ) , data = json . dumps ( { 'compatibility' : level } ) , headers = HEADERS ) raise_if_failed ( res )
11,252
https://github.com/gamechanger/confluent_schema_registry_client/blob/ac9196e366724eeb2f19f1a169fd2f9a0c8d68ae/confluent_schema_registry_client/__init__.py#L160-L168
[ "def", "stop_experiment", "(", "args", ")", ":", "experiment_id_list", "=", "parse_ids", "(", "args", ")", "if", "experiment_id_list", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "for", "experiment_id", "in", "experiment_id_list", ":", "print_normal", "(", "'Stoping experiment %s'", "%", "experiment_id", ")", "nni_config", "=", "Config", "(", "experiment_dict", "[", "experiment_id", "]", "[", "'fileName'", "]", ")", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "rest_pid", ":", "kill_command", "(", "rest_pid", ")", "tensorboard_pid_list", "=", "nni_config", ".", "get_config", "(", "'tensorboardPidList'", ")", "if", "tensorboard_pid_list", ":", "for", "tensorboard_pid", "in", "tensorboard_pid_list", ":", "try", ":", "kill_command", "(", "tensorboard_pid", ")", "except", "Exception", "as", "exception", ":", "print_error", "(", "exception", ")", "nni_config", ".", "set_config", "(", "'tensorboardPidList'", ",", "[", "]", ")", "print_normal", "(", "'Stop experiment success!'", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'status'", ",", "'STOPPED'", ")", "time_now", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'endTime'", ",", "str", "(", "time_now", ")", ")" ]
Gets the compatibility level for the given subject .
def get_subject_compatibility_level ( self , subject ) : res = requests . get ( self . _url ( '/config/{}' , subject ) , headers = HEADERS ) raise_if_failed ( res ) return res . json ( ) [ 'compatibility' ]
11,253
https://github.com/gamechanger/confluent_schema_registry_client/blob/ac9196e366724eeb2f19f1a169fd2f9a0c8d68ae/confluent_schema_registry_client/__init__.py#L170-L176
[ "def", "stop_experiment", "(", "args", ")", ":", "experiment_id_list", "=", "parse_ids", "(", "args", ")", "if", "experiment_id_list", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "for", "experiment_id", "in", "experiment_id_list", ":", "print_normal", "(", "'Stoping experiment %s'", "%", "experiment_id", ")", "nni_config", "=", "Config", "(", "experiment_dict", "[", "experiment_id", "]", "[", "'fileName'", "]", ")", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "rest_pid", ":", "kill_command", "(", "rest_pid", ")", "tensorboard_pid_list", "=", "nni_config", ".", "get_config", "(", "'tensorboardPidList'", ")", "if", "tensorboard_pid_list", ":", "for", "tensorboard_pid", "in", "tensorboard_pid_list", ":", "try", ":", "kill_command", "(", "tensorboard_pid", ")", "except", "Exception", "as", "exception", ":", "print_error", "(", "exception", ")", "nni_config", ".", "set_config", "(", "'tensorboardPidList'", ",", "[", "]", ")", "print_normal", "(", "'Stop experiment success!'", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'status'", ",", "'STOPPED'", ")", "time_now", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'endTime'", ",", "str", "(", "time_now", ")", ")" ]
Parses a payload from the API guided by _api_attrs
def from_api ( cls , api , * * kwargs ) : if not cls . _api_attrs : raise NotImplementedError ( ) def resolve_attribute_type ( attr_type ) : # resolve arrays of types down to base type while isinstance ( attr_type , list ) : attr_type = attr_type [ 0 ] # attribute type 'self' resolves to current class if attr_type == 'self' : attr_type = cls # attribute type 'date' is a unix timestamp if attr_type == 'date' : attr_type = datetime . datetime . fromtimestamp # string attributes should use unicode literals if attr_type is str : attr_type = unicode # if attribute type is an APIObject, use the from_api factory method and pass the `api` argument if hasattr ( attr_type , 'from_api' ) : return lambda * * kw : attr_type . from_api ( api , * * kw ) return attr_type def instantiate_attr ( attr_value , attr_type ) : if isinstance ( attr_value , dict ) : return attr_type ( * * attr_value ) return attr_type ( attr_value ) def instantiate_array ( attr_values , attr_type ) : func = instantiate_attr if isinstance ( attr_values [ 0 ] , list ) : func = instantiate_array return [ func ( val , attr_type ) for val in attr_values ] def instantiate ( attr_value , attr_type ) : if isinstance ( attr_value , list ) : return instantiate_array ( attr_value , attr_type ) return instantiate_attr ( attr_value , attr_type ) instance = cls ( api ) for attr_name , attr_type , attr_default in cls . _api_attrs : # grab the current attribute value attr_value = kwargs . get ( attr_name , attr_default ) # default of TypeError means a required attribute, raise Exception if attr_value is TypeError : raise TypeError ( '{} requires argument {}' . format ( cls . 
__name__ , attr_name ) ) attr_type = resolve_attribute_type ( attr_type ) # if value has been provided from API, instantiate it using `attr_type` if attr_value != attr_default : attr_value = instantiate ( attr_value , attr_type ) # rename the 'from' variable, reserved word if attr_name == 'from' : attr_name = 'froom' # and finally set the attribute value on the instance setattr ( instance , attr_name , attr_value ) return instance
11,254
https://github.com/wrboyce/telegrambot/blob/c35ce19886df4c306a2a19851cc1f63e3066d70d/telegrambot/api/base.py#L21-L76
[ "def", "getInitialSample", "(", "self", ",", "wmg", ")", ":", "cands", "=", "range", "(", "len", "(", "wmg", ")", ")", "allPairs", "=", "itertools", ".", "combinations", "(", "cands", ",", "2", ")", "V", "=", "self", ".", "createBinaryRelation", "(", "len", "(", "cands", ")", ")", "for", "pair", "in", "allPairs", ":", "if", "wmg", "[", "pair", "[", "0", "]", "+", "1", "]", "[", "pair", "[", "1", "]", "+", "1", "]", ">", "0", ":", "V", "[", "pair", "[", "0", "]", "]", "[", "pair", "[", "1", "]", "]", "=", "1", "V", "[", "pair", "[", "1", "]", "]", "[", "pair", "[", "0", "]", "]", "=", "0", "else", ":", "V", "[", "pair", "[", "0", "]", "]", "[", "pair", "[", "1", "]", "]", "=", "0", "V", "[", "pair", "[", "1", "]", "]", "[", "pair", "[", "0", "]", "]", "=", "1", "return", "V" ]
Returns the api method to send the current API Object type
def api_method ( self ) : if not self . _api_method : raise NotImplementedError ( ) return getattr ( self . api , self . _api_method )
11,255
https://github.com/wrboyce/telegrambot/blob/c35ce19886df4c306a2a19851cc1f63e3066d70d/telegrambot/api/base.py#L78-L82
[ "def", "load", "(", "self", ",", "filename", ",", "offset", ")", ":", "self", ".", "offset", "=", "offset", "self", ".", "filename", "=", "filename", "self", ".", "bootsector", "=", "BootSector", "(", "filename", "=", "filename", ",", "length", "=", "NTFS_BOOTSECTOR_SIZE", ",", "offset", "=", "self", ".", "offset", ")", "self", ".", "mft_table", "=", "MftTable", "(", "mft_entry_size", "=", "self", ".", "bootsector", ".", "mft_record_size", ",", "filename", "=", "self", ".", "filename", ",", "offset", "=", "self", ".", "mft_table_offset", ")", "self", ".", "mft_table", ".", "preload_entries", "(", "NUM_SYSTEM_ENTRIES", ")", "self", ".", "_load_volume_information", "(", ")" ]
Generates a payload ready for submission to the API guided by _api_payload
def api_payload ( self ) : if not self . _api_payload : raise NotImplementedError ( ) payload = { } for attr_name in self . _api_payload : value = getattr ( self , attr_name , None ) if value is not None : payload [ attr_name ] = value return payload
11,256
https://github.com/wrboyce/telegrambot/blob/c35ce19886df4c306a2a19851cc1f63e3066d70d/telegrambot/api/base.py#L84-L93
[ "def", "normalize", "(", "X", ")", ":", "X", "=", "coo_matrix", "(", "X", ")", "X", ".", "data", "=", "X", ".", "data", "/", "sqrt", "(", "bincount", "(", "X", ".", "row", ",", "X", ".", "data", "**", "2", ")", ")", "[", "X", ".", "row", "]", "return", "X" ]
Combines api_payload and api_method to submit the current object to the API
def send ( self , * * kwargs ) : payload = self . api_payload ( ) payload . update ( * * kwargs ) return self . api_method ( ) ( * * payload )
11,257
https://github.com/wrboyce/telegrambot/blob/c35ce19886df4c306a2a19851cc1f63e3066d70d/telegrambot/api/base.py#L95-L99
[ "def", "_generate_examples_validation", "(", "self", ",", "archive", ",", "labels", ")", ":", "# Get the current random seeds.", "numpy_st0", "=", "np", ".", "random", ".", "get_state", "(", ")", "# Set new random seeds.", "np", ".", "random", ".", "seed", "(", "135", ")", "logging", ".", "warning", "(", "'Overwriting cv2 RNG seed.'", ")", "tfds", ".", "core", ".", "lazy_imports", ".", "cv2", ".", "setRNGSeed", "(", "357", ")", "for", "example", "in", "super", "(", "Imagenet2012Corrupted", ",", "self", ")", ".", "_generate_examples_validation", "(", "archive", ",", "labels", ")", ":", "with", "tf", ".", "Graph", "(", ")", ".", "as_default", "(", ")", ":", "tf_img", "=", "tf", ".", "image", ".", "decode_jpeg", "(", "example", "[", "'image'", "]", ".", "read", "(", ")", ",", "channels", "=", "3", ")", "image_np", "=", "tfds", ".", "as_numpy", "(", "tf_img", ")", "example", "[", "'image'", "]", "=", "self", ".", "_get_corrupted_example", "(", "image_np", ")", "yield", "example", "# Reset the seeds back to their original values.", "np", ".", "random", ".", "set_state", "(", "numpy_st0", ")" ]
Parses an address
def parse_addr ( addr , * , proto = None , host = None ) : port = None if isinstance ( addr , Address ) : return addr elif isinstance ( addr , str ) : if addr . startswith ( 'http://' ) : proto , addr = 'http' , addr [ 7 : ] if addr . startswith ( 'udp://' ) : proto , addr = 'udp' , addr [ 6 : ] elif addr . startswith ( 'tcp://' ) : proto , addr = 'tcp' , addr [ 6 : ] elif addr . startswith ( 'unix://' ) : proto , addr = 'unix' , addr [ 7 : ] a , _ , b = addr . partition ( ':' ) host = a or host port = b or port elif isinstance ( addr , ( tuple , list ) ) : # list is not good a , b = addr host = a or host port = b or port elif isinstance ( addr , int ) : port = addr else : raise ValueError ( 'bad value' ) if port is not None : port = int ( port ) return Address ( proto , host , port )
11,258
https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/common/addr.py#L12-L46
[ "def", "get_samples", "(", "self", ",", "n", ")", ":", "normalized_w", "=", "self", ".", "weights", "/", "np", ".", "sum", "(", "self", ".", "weights", ")", "get_rand_index", "=", "st", ".", "rv_discrete", "(", "values", "=", "(", "range", "(", "self", ".", "N", ")", ",", "normalized_w", ")", ")", ".", "rvs", "(", "size", "=", "n", ")", "samples", "=", "np", ".", "zeros", "(", "n", ")", "k", "=", "0", "j", "=", "0", "while", "(", "k", "<", "n", ")", ":", "i", "=", "get_rand_index", "[", "j", "]", "j", "=", "j", "+", "1", "if", "(", "j", "==", "n", ")", ":", "get_rand_index", "=", "st", ".", "rv_discrete", "(", "values", "=", "(", "range", "(", "self", ".", "N", ")", ",", "normalized_w", ")", ")", ".", "rvs", "(", "size", "=", "n", ")", "j", "=", "0", "v", "=", "np", ".", "random", ".", "normal", "(", "loc", "=", "self", ".", "points", "[", "i", "]", ",", "scale", "=", "self", ".", "sigma", "[", "i", "]", ")", "if", "(", "v", ">", "self", ".", "max_limit", "or", "v", "<", "self", ".", "min_limit", ")", ":", "continue", "else", ":", "samples", "[", "k", "]", "=", "v", "k", "=", "k", "+", "1", "if", "(", "k", "==", "n", ")", ":", "break", "return", "samples" ]
Applies the rule from the builder system to this line edit .
def applyRule ( self ) : widget = self . queryBuilderWidget ( ) if ( not widget ) : return rule = widget . findRule ( self . uiTermDDL . currentText ( ) ) self . setCurrentRule ( rule )
11,259
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xquerybuilderwidget/xquerylinewidget.py#L53-L62
[ "def", "assemble", "(", "cls", ",", "header_json", ",", "metadata_json", ",", "content_json", ")", ":", "try", ":", "header", "=", "json_decode", "(", "header_json", ")", "except", "ValueError", ":", "raise", "MessageError", "(", "\"header could not be decoded\"", ")", "try", ":", "metadata", "=", "json_decode", "(", "metadata_json", ")", "except", "ValueError", ":", "raise", "MessageError", "(", "\"metadata could not be decoded\"", ")", "try", ":", "content", "=", "json_decode", "(", "content_json", ")", "except", "ValueError", ":", "raise", "MessageError", "(", "\"content could not be decoded\"", ")", "msg", "=", "cls", "(", "header", ",", "metadata", ",", "content", ")", "msg", ".", "_header_json", "=", "header_json", "msg", ".", "_metadata_json", "=", "metadata_json", "msg", ".", "_content_json", "=", "content_json", "return", "msg" ]
Updates the editor based on the current selection .
def updateEditor ( self ) : # assignt the rule operators to the choice list rule = self . currentRule ( ) operator = self . currentOperator ( ) widget = self . uiWidgetAREA . widget ( ) editorType = None text = '' if ( rule ) : editorType = rule . editorType ( operator ) # no change in types if ( widget and editorType and type ( widget ) == editorType ) : return elif ( widget ) : if ( type ( widget ) != QWidget ) : text = widget . text ( ) widget . setParent ( None ) widget . deleteLater ( ) self . uiWidgetAREA . setWidget ( None ) # create the new editor if ( editorType ) : widget = editorType ( self ) if ( isinstance ( widget , QLineEdit ) ) : terms = rule . completionTerms ( ) if ( not terms ) : qwidget = self . queryBuilderWidget ( ) if ( qwidget ) : terms = qwidget . completionTerms ( ) if ( terms ) : widget . setCompleter ( XQueryCompleter ( terms , widget ) ) self . uiWidgetAREA . setWidget ( widget ) if ( type ( widget ) != QWidget ) : widget . setText ( text )
11,260
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xquerybuilderwidget/xquerylinewidget.py#L278-L323
[ "def", "_readASCII", "(", "self", ",", "filename", ")", ":", "self", ".", "waveunits", "=", "units", ".", "Units", "(", "'angstrom'", ")", "self", ".", "fluxunits", "=", "units", ".", "Units", "(", "'flam'", ")", "wlist", ",", "flist", "=", "self", ".", "_columnsFromASCII", "(", "filename", ")", "self", ".", "_wavetable", "=", "N", ".", "array", "(", "wlist", ",", "dtype", "=", "N", ".", "float64", ")", "self", ".", "_fluxtable", "=", "N", ".", "array", "(", "flist", ",", "dtype", "=", "N", ".", "float64", ")" ]
Emits the current schema changed signal for this combobox provided \ the signals aren t blocked .
def emitCurrentChanged ( self ) : if ( not self . signalsBlocked ( ) ) : schema = self . currentSchema ( ) self . currentSchemaChanged . emit ( schema ) if ( schema ) : self . currentTableChanged . emit ( schema . model ( ) ) else : self . currentTableChanged . emit ( None )
11,261
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xorbschemabox.py#L59-L70
[ "def", "generate_timing_stats", "(", "file_list", ",", "var_list", ")", ":", "timing_result", "=", "dict", "(", ")", "timing_summary", "=", "dict", "(", ")", "for", "file", "in", "file_list", ":", "timing_result", "[", "file", "]", "=", "functions", ".", "parse_gptl", "(", "file", ",", "var_list", ")", "for", "var", "in", "var_list", ":", "var_time", "=", "[", "]", "for", "f", ",", "data", "in", "timing_result", ".", "items", "(", ")", ":", "try", ":", "var_time", ".", "append", "(", "data", "[", "var", "]", ")", "except", ":", "continue", "if", "len", "(", "var_time", ")", ":", "timing_summary", "[", "var", "]", "=", "{", "'mean'", ":", "np", ".", "mean", "(", "var_time", ")", ",", "'max'", ":", "np", ".", "max", "(", "var_time", ")", ",", "'min'", ":", "np", ".", "min", "(", "var_time", ")", ",", "'std'", ":", "np", ".", "std", "(", "var_time", ")", "}", "return", "timing_summary" ]
Saves the current settings for the actions in the list and exits the widget .
def save ( self ) : if ( not self . updateShortcut ( ) ) : return False for i in range ( self . uiActionTREE . topLevelItemCount ( ) ) : item = self . uiActionTREE . topLevelItem ( i ) action = item . action ( ) action . setShortcut ( QKeySequence ( item . text ( 1 ) ) ) return True
11,262
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/configs/xshortcutconfig/xshortcutwidget.py#L119-L132
[ "def", "is_extension_type", "(", "arr", ")", ":", "if", "is_categorical", "(", "arr", ")", ":", "return", "True", "elif", "is_sparse", "(", "arr", ")", ":", "return", "True", "elif", "is_datetime64tz_dtype", "(", "arr", ")", ":", "return", "True", "return", "False" ]
Shows the popup for this button .
def showPopup ( self ) : as_dialog = QApplication . keyboardModifiers ( ) anchor = self . defaultAnchor ( ) if anchor : self . popupWidget ( ) . setAnchor ( anchor ) else : anchor = self . popupWidget ( ) . anchor ( ) if ( anchor & ( XPopupWidget . Anchor . BottomLeft | XPopupWidget . Anchor . BottomCenter | XPopupWidget . Anchor . BottomRight ) ) : pos = QPoint ( self . width ( ) / 2 , 0 ) else : pos = QPoint ( self . width ( ) / 2 , self . height ( ) ) pos = self . mapToGlobal ( pos ) if not self . signalsBlocked ( ) : self . popupAboutToShow . emit ( ) self . _popupWidget . popup ( pos ) if as_dialog : self . _popupWidget . setCurrentMode ( XPopupWidget . Mode . Dialog )
11,263
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xpopupbutton.py#L128-L154
[ "def", "handle_error", "(", "self", ",", "error", ",", "req", ",", "schema", ",", "error_status_code", ",", "error_headers", ")", ":", "status_code", "=", "error_status_code", "or", "self", ".", "DEFAULT_VALIDATION_STATUS", "if", "status_code", "==", "422", ":", "reason", "=", "\"Unprocessable Entity\"", "else", ":", "reason", "=", "None", "raise", "HTTPError", "(", "status_code", ",", "log_message", "=", "str", "(", "error", ".", "messages", ")", ",", "reason", "=", "reason", ",", "messages", "=", "error", ".", "messages", ",", "headers", "=", "error_headers", ",", ")" ]
Toggles whether or not the popup is visible .
def togglePopup ( self ) : if not self . _popupWidget . isVisible ( ) : self . showPopup ( ) elif self . _popupWidget . currentMode ( ) != self . _popupWidget . Mode . Dialog : self . _popupWidget . close ( )
11,264
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/widgets/xpopupbutton.py#L156-L163
[ "def", "_stream_annotation", "(", "file_name", ",", "pb_dir", ")", ":", "# Full url of annotation file", "url", "=", "posixpath", ".", "join", "(", "config", ".", "db_index_url", ",", "pb_dir", ",", "file_name", ")", "# Get the content", "response", "=", "requests", ".", "get", "(", "url", ")", "# Raise HTTPError if invalid url", "response", ".", "raise_for_status", "(", ")", "# Convert to numpy array", "ann_data", "=", "np", ".", "fromstring", "(", "response", ".", "content", ",", "dtype", "=", "np", ".", "dtype", "(", "'<u1'", ")", ")", "return", "ann_data" ]
Returns appropriate form field .
def form_field ( self ) : label = unicode ( self ) defaults = dict ( required = False , label = label , widget = self . widget ) defaults . update ( self . extra ) return self . field_class ( * * defaults )
11,265
https://github.com/neithere/eav-django/blob/7f2e9fe17bbe740622cfb38f6ce0e8413b7da3d7/eav/facets.py#L69-L74
[ "def", "schedule_snapshot", "(", "self", ")", ":", "# Notes:", "# - Snapshots are not immediate.", "# - Snapshots will be cached for predefined amount", "# of time.", "# - Snapshots are not balanced. To get a better", "# image, it must be taken from the stream, a few", "# seconds after stream start.", "url", "=", "SNAPSHOTS_ENDPOINT", "params", "=", "SNAPSHOTS_BODY", "params", "[", "'from'", "]", "=", "\"{0}_web\"", ".", "format", "(", "self", ".", "user_id", ")", "params", "[", "'to'", "]", "=", "self", ".", "device_id", "params", "[", "'resource'", "]", "=", "\"cameras/{0}\"", ".", "format", "(", "self", ".", "device_id", ")", "params", "[", "'transId'", "]", "=", "\"web!{0}\"", ".", "format", "(", "self", ".", "xcloud_id", ")", "# override headers", "headers", "=", "{", "'xCloudId'", ":", "self", ".", "xcloud_id", "}", "_LOGGER", ".", "debug", "(", "\"Snapshot device %s\"", ",", "self", ".", "name", ")", "_LOGGER", ".", "debug", "(", "\"Device params %s\"", ",", "params", ")", "_LOGGER", ".", "debug", "(", "\"Device headers %s\"", ",", "headers", ")", "ret", "=", "self", ".", "_session", ".", "query", "(", "url", ",", "method", "=", "'POST'", ",", "extra_params", "=", "params", ",", "extra_headers", "=", "headers", ")", "_LOGGER", ".", "debug", "(", "\"Snapshot results %s\"", ",", "ret", ")", "return", "ret", "is", "not", "None", "and", "ret", ".", "get", "(", "'success'", ")" ]
Returns attribute name for this facet
def attr_name ( self ) : return self . schema . name if self . schema else self . field . name
11,266
https://github.com/neithere/eav-django/blob/7f2e9fe17bbe740622cfb38f6ce0e8413b7da3d7/eav/facets.py#L77-L79
[ "def", "recv", "(", "self", ")", ":", "LOGGER", ".", "debug", "(", "'Receiving'", ")", "try", ":", "message_length", "=", "struct", ".", "unpack", "(", "'>i'", ",", "self", ".", "_socket", ".", "recv", "(", "4", ")", ")", "[", "0", "]", "message_length", "-=", "Connection", ".", "COMM_LENGTH", "LOGGER", ".", "debug", "(", "'Length: %i'", ",", "message_length", ")", "except", "socket", ".", "timeout", ":", "return", "None", "comm_status", "=", "struct", ".", "unpack", "(", "'>i'", ",", "self", ".", "_socket", ".", "recv", "(", "4", ")", ")", "[", "0", "]", "LOGGER", ".", "debug", "(", "'Status: %i'", ",", "comm_status", ")", "bytes_received", "=", "0", "message", "=", "b\"\"", "while", "bytes_received", "<", "message_length", ":", "if", "message_length", "-", "bytes_received", ">=", "1024", ":", "recv_len", "=", "1024", "else", ":", "recv_len", "=", "message_length", "-", "bytes_received", "bytes_received", "+=", "recv_len", "LOGGER", ".", "debug", "(", "'Received %i'", ",", "bytes_received", ")", "message", "+=", "self", ".", "_socket", ".", "recv", "(", "recv_len", ")", "if", "comm_status", "==", "0", ":", "message", "=", "self", ".", "_crypt", ".", "decrypt", "(", "message", ")", "else", ":", "return", "Message", "(", "len", "(", "message", ")", ",", "Connection", ".", "COMM_ERROR", ",", "message", ")", "msg", "=", "Message", "(", "message_length", ",", "comm_status", ",", "message", ")", "return", "msg" ]
Returns field instance and lookup prefix for given attribute name . Can be overloaded in subclasses to provide filtering across multiple models .
def get_field_and_lookup ( self , name ) : name = self . get_queryset ( ) . model . _meta . get_field ( name ) lookup_prefix = '' return name , lookup_prefix
11,267
https://github.com/neithere/eav-django/blob/7f2e9fe17bbe740622cfb38f6ce0e8413b7da3d7/eav/facets.py#L304-L311
[ "def", "is_sync_table", "(", "self", ",", "archive", ",", "interval", ",", "*", "*", "import_args", ")", ":", "return", "(", "hasattr", "(", "archive", ",", "\"startswith\"", ")", "and", "archive", ".", "startswith", "(", "\"http\"", ")", "or", "\"connection\"", "in", "import_args", ")", "and", "interval", "is", "not", "None" ]
Dispatches metrics streamed by collector
def StreamMetrics ( self , request_iterator , context ) : LOG . debug ( "StreamMetrics called" ) # set up arguments collect_args = ( next ( request_iterator ) ) max_metrics_buffer = 0 max_collect_duration = 0 cfg = Metric ( pb = collect_args . Metrics_Arg . metrics [ 0 ] ) try : max_metrics_buffer = int ( cfg . config [ "max-metrics-buffer" ] ) except Exception as ex : LOG . debug ( "Unable to get schedule parameters: {}" . format ( ex ) ) try : max_collect_duration = int ( cfg . config [ "max-collect-duration" ] ) except Exception as ex : LOG . debug ( "Unable to get schedule parameters: {}" . format ( ex ) ) if max_metrics_buffer > 0 : self . max_metrics_buffer = max_metrics_buffer if max_collect_duration > 0 : self . max_collect_duration = max_collect_duration # start collection thread thread = threading . Thread ( target = self . _stream_wrapper , args = ( collect_args , ) , ) thread . daemon = True thread . start ( ) # stream metrics metrics = [ ] metrics_to_stream = [ ] stream_timeout = self . max_collect_duration while context . is_active ( ) : try : # wait for metrics until timeout is reached t_start = time . time ( ) metrics = self . metrics_queue . get ( block = True , timeout = stream_timeout ) elapsed = round ( time . time ( ) - t_start ) stream_timeout -= elapsed except queue . Empty : LOG . debug ( "Max collect duration exceeded. Streaming {} metrics" . format ( len ( metrics_to_stream ) ) ) metrics_col = CollectReply ( Metrics_Reply = MetricsReply ( metrics = [ m . pb for m in metrics_to_stream ] ) ) metrics_to_stream = [ ] stream_timeout = self . max_collect_duration yield metrics_col else : for metric in metrics : metrics_to_stream . append ( metric ) if len ( metrics_to_stream ) == self . max_metrics_buffer : LOG . debug ( "Max metrics buffer reached. Streaming {} metrics" . format ( len ( metrics_to_stream ) ) ) metrics_col = CollectReply ( Metrics_Reply = MetricsReply ( metrics = [ m . 
pb for m in metrics_to_stream ] ) ) metrics_to_stream = [ ] stream_timeout = self . max_collect_duration yield metrics_col # stream metrics if max_metrics_buffer is 0 or enough metrics has been collected if self . max_metrics_buffer == 0 : LOG . debug ( "Max metrics buffer set to 0. Streaming {} metrics" . format ( len ( metrics_to_stream ) ) ) metrics_col = CollectReply ( Metrics_Reply = MetricsReply ( metrics = [ m . pb for m in metrics_to_stream ] ) ) metrics_to_stream = [ ] stream_timeout = self . max_collect_duration yield metrics_col # sent notification if stream has been stopped self . done_queue . put ( True )
11,268
https://github.com/intelsdi-x/snap-plugin-lib-py/blob/8da5d00ac5f9d2b48a7239563ac7788209891ca4/snap_plugin/v1/stream_collector_proxy.py#L57-L120
[ "def", "create", "(", "self", ",", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "super", "(", "ImageMemberManager", ",", "self", ")", ".", "create", "(", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "if", "e", ".", "http_status", "==", "403", ":", "raise", "exc", ".", "UnsharableImage", "(", "\"You cannot share a public image.\"", ")", "else", ":", "raise" ]
Dispatches the request to the plugins update_catalog method
def GetMetricTypes ( self , request , context ) : LOG . debug ( "GetMetricTypes called" ) try : metrics = self . plugin . update_catalog ( ConfigMap ( pb = request . config ) ) return MetricsReply ( metrics = [ m . pb for m in metrics ] ) except Exception as err : msg = "message: {}\n\nstack trace: {}" . format ( err , traceback . format_exc ( ) ) return MetricsReply ( metrics = [ ] , error = msg )
11,269
https://github.com/intelsdi-x/snap-plugin-lib-py/blob/8da5d00ac5f9d2b48a7239563ac7788209891ca4/snap_plugin/v1/stream_collector_proxy.py#L122-L131
[ "def", "batch_star", "(", "self", ",", "path", ")", ":", "if", "lib", ".", "EnvBatchStar", "(", "self", ".", "_env", ",", "path", ".", "encode", "(", ")", ")", "!=", "1", ":", "raise", "CLIPSError", "(", "self", ".", "_env", ")" ]
Publish a MQTT message
def command_publish ( self , command , * * kwargs ) : mqttc = mqtt . Client ( ) mqttc . connect ( command [ 'host' ] , port = int ( command [ 'port' ] ) ) mqttc . loop_start ( ) try : mqttc . publish ( command [ 'endpoint' ] , command [ 'payload' ] ) finally : mqttc . loop_stop ( force = False )
11,270
https://github.com/davidemoro/play_mqtt/blob/4994074c20ab8a5abd221f8b8088e5fc44ba2a5e/play_mqtt/providers.py#L8-L22
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Subscribe to a topic or list of topics
def command_subscribe ( self , command , * * kwargs ) : topic = command [ 'topic' ] encoding = command . get ( 'encoding' , 'utf-8' ) name = command [ 'name' ] if not hasattr ( self . engine , '_mqtt' ) : self . engine . _mqtt = { } self . engine . variables [ name ] = [ ] def on_message ( client , userdata , msg ) : userdata . append ( msg . payload . decode ( encoding ) ) self . engine . _mqtt [ name ] = client = mqtt . Client ( userdata = self . engine . variables [ name ] ) client . on_message = on_message client . connect ( command [ 'host' ] , port = int ( command [ 'port' ] ) ) client . subscribe ( topic ) client . loop_start ( ) self . engine . register_teardown_callback ( client . loop_stop )
11,271
https://github.com/davidemoro/play_mqtt/blob/4994074c20ab8a5abd221f8b8088e5fc44ba2a5e/play_mqtt/providers.py#L24-L45
[ "def", "send_ftp", "(", "outdir", ")", ":", "print", "(", "\"Uploading the files in the \"", "+", "outdir", "+", "\"/ directory!\\n\"", ")", "# Make sure there is actually a configuration file", "config_file_dir", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "\"config.py\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "config_file_dir", ")", ":", "sys", ".", "exit", "(", "\"There dosen't seem to be a configuration file. Have you run the init command?\"", ")", "else", ":", "sys", ".", "path", ".", "insert", "(", "0", ",", "cwd", ")", "try", ":", "from", "config", "import", "ftp_server", ",", "ftp_username", ",", "ftp_password", ",", "ftp_port", ",", "ftp_upload_path", "except", ":", "sys", ".", "exit", "(", "\"The FTP settings could not be found. Maybe your config file is too old. Re-run 'blended init' to fix it.\"", ")", "server", "=", "ftp_server", "username", "=", "ftp_username", "password", "=", "ftp_password", "port", "=", "ftp_port", "ftp", "=", "FTP", "(", ")", "ftp", ".", "connect", "(", "server", ",", "port", ")", "ftp", ".", "login", "(", "username", ",", "password", ")", "filenameCV", "=", "os", ".", "path", ".", "join", "(", "cwd", ",", "outdir", ")", "try", ":", "ftp", ".", "cwd", "(", "ftp_upload_path", ")", "placeFiles", "(", "ftp", ",", "filenameCV", ")", "except", ":", "ftp", ".", "quit", "(", ")", "sys", ".", "exit", "(", "\"Files not able to be uploaded! Are you sure the directory exists?\"", ")", "ftp", ".", "quit", "(", ")", "print", "(", "\"\\nFTP Done!\"", ")" ]
Retrieve weather parameter .
def get_data ( param , data ) : try : for ( _ , selected_time_entry ) in data : loc_data = selected_time_entry [ 'location' ] if param not in loc_data : continue if param == 'precipitation' : new_state = loc_data [ param ] [ '@value' ] elif param == 'symbol' : new_state = int ( float ( loc_data [ param ] [ '@number' ] ) ) elif param in ( 'temperature' , 'pressure' , 'humidity' , 'dewpointTemperature' ) : new_state = round ( float ( loc_data [ param ] [ '@value' ] ) , 1 ) elif param in ( 'windSpeed' , 'windGust' ) : new_state = round ( float ( loc_data [ param ] [ '@mps' ] ) * 3.6 , 1 ) elif param == 'windDirection' : new_state = round ( float ( loc_data [ param ] [ '@deg' ] ) , 1 ) elif param in ( 'fog' , 'cloudiness' , 'lowClouds' , 'mediumClouds' , 'highClouds' ) : new_state = round ( float ( loc_data [ param ] [ '@percent' ] ) , 1 ) return new_state except ( ValueError , IndexError , KeyError ) : return None
11,272
https://github.com/Danielhiversen/pyMetno/blob/7d200a495fdea0e1a9310069fdcd65f205d6e6f5/metno/__init__.py#L144-L168
[ "def", "remove", "(", "app_id", ")", ":", "cmd", "=", "'sqlite3 \"/Library/Application Support/com.apple.TCC/TCC.db\" '", "'\"DELETE from access where client=\\'{0}\\'\"'", ".", "format", "(", "app_id", ")", "call", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "output_loglevel", "=", "'debug'", ",", "python_shell", "=", "False", ")", "if", "call", "[", "'retcode'", "]", "!=", "0", ":", "comment", "=", "''", "if", "'stderr'", "in", "call", ":", "comment", "+=", "call", "[", "'stderr'", "]", "if", "'stdout'", "in", "call", ":", "comment", "+=", "call", "[", "'stdout'", "]", "raise", "CommandExecutionError", "(", "'Error removing app: {0}'", ".", "format", "(", "comment", ")", ")", "return", "True" ]
Parse datetime .
def parse_datetime ( dt_str ) : date_format = "%Y-%m-%dT%H:%M:%S %z" dt_str = dt_str . replace ( "Z" , " +0000" ) return datetime . datetime . strptime ( dt_str , date_format )
11,273
https://github.com/Danielhiversen/pyMetno/blob/7d200a495fdea0e1a9310069fdcd65f205d6e6f5/metno/__init__.py#L257-L261
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Get the latest data from met . no .
async def fetching_data ( self , * _ ) : try : with async_timeout . timeout ( 10 ) : resp = await self . _websession . get ( self . _api_url , params = self . _urlparams ) if resp . status != 200 : _LOGGER . error ( '%s returned %s' , self . _api_url , resp . status ) return False text = await resp . text ( ) except ( asyncio . TimeoutError , aiohttp . ClientError ) as err : _LOGGER . error ( '%s returned %s' , self . _api_url , err ) return False try : self . data = xmltodict . parse ( text ) [ 'weatherdata' ] except ( ExpatError , IndexError ) as err : _LOGGER . error ( '%s returned %s' , resp . url , err ) return False return True
11,274
https://github.com/Danielhiversen/pyMetno/blob/7d200a495fdea0e1a9310069fdcd65f205d6e6f5/metno/__init__.py#L75-L93
[ "def", "logout", "(", "client", ")", ":", "if", "not", "client", ".", "session_id", ":", "client", ".", "request_session", "(", ")", "concierge_request_header", "=", "client", ".", "construct_concierge_header", "(", "url", "=", "(", "\"http://membersuite.com/contracts/IConciergeAPIService/\"", "\"Logout\"", ")", ")", "logout_result", "=", "client", ".", "client", ".", "service", ".", "Logout", "(", "_soapheaders", "=", "[", "concierge_request_header", "]", ")", "result", "=", "logout_result", "[", "\"body\"", "]", "[", "\"LogoutResult\"", "]", "if", "result", "[", "\"SessionID\"", "]", "is", "None", ":", "# Success!", "client", ".", "session_id", "=", "None", "else", ":", "# Failure . . .", "raise", "LogoutError", "(", "result", "=", "result", ")" ]
Get the forecast weather data from met . no .
def get_forecast ( self , time_zone ) : if self . data is None : return [ ] now = datetime . datetime . now ( time_zone ) . replace ( hour = 12 , minute = 0 , second = 0 , microsecond = 0 ) times = [ now + datetime . timedelta ( days = k ) for k in range ( 1 , 6 ) ] return [ self . get_weather ( _time ) for _time in times ]
11,275
https://github.com/Danielhiversen/pyMetno/blob/7d200a495fdea0e1a9310069fdcd65f205d6e6f5/metno/__init__.py#L99-L107
[ "def", "_create_justification_button", "(", "self", ")", ":", "iconnames", "=", "[", "\"JustifyLeft\"", ",", "\"JustifyCenter\"", ",", "\"JustifyRight\"", "]", "bmplist", "=", "[", "icons", "[", "iconname", "]", "for", "iconname", "in", "iconnames", "]", "self", ".", "justify_tb", "=", "_widgets", ".", "BitmapToggleButton", "(", "self", ",", "bmplist", ")", "self", ".", "justify_tb", ".", "SetToolTipString", "(", "_", "(", "u\"Justification\"", ")", ")", "self", ".", "Bind", "(", "wx", ".", "EVT_BUTTON", ",", "self", ".", "OnJustification", ",", "self", ".", "justify_tb", ")", "self", ".", "AddControl", "(", "self", ".", "justify_tb", ")" ]
Get the current weather data from met . no .
def get_weather ( self , time , max_hour = 6 ) : if self . data is None : return { } ordered_entries = [ ] for time_entry in self . data [ 'product' ] [ 'time' ] : valid_from = parse_datetime ( time_entry [ '@from' ] ) valid_to = parse_datetime ( time_entry [ '@to' ] ) if time > valid_to : # Has already passed. Never select this. continue average_dist = ( abs ( ( valid_to - time ) . total_seconds ( ) ) + abs ( ( valid_from - time ) . total_seconds ( ) ) ) if average_dist > max_hour * 3600 : continue ordered_entries . append ( ( average_dist , time_entry ) ) if not ordered_entries : return { } ordered_entries . sort ( key = lambda item : item [ 0 ] ) res = dict ( ) res [ 'datetime' ] = time res [ 'temperature' ] = get_data ( 'temperature' , ordered_entries ) res [ 'condition' ] = CONDITIONS . get ( get_data ( 'symbol' , ordered_entries ) ) res [ 'pressure' ] = get_data ( 'pressure' , ordered_entries ) res [ 'humidity' ] = get_data ( 'humidity' , ordered_entries ) res [ 'wind_speed' ] = get_data ( 'windSpeed' , ordered_entries ) res [ 'wind_bearing' ] = get_data ( 'windDirection' , ordered_entries ) return res
11,276
https://github.com/Danielhiversen/pyMetno/blob/7d200a495fdea0e1a9310069fdcd65f205d6e6f5/metno/__init__.py#L109-L141
[ "def", "TxKazooClient", "(", "reactor", ",", "pool", ",", "client", ")", ":", "make_thimble", "=", "partial", "(", "Thimble", ",", "reactor", ",", "pool", ")", "wrapper", "=", "_RunCallbacksInReactorThreadWrapper", "(", "reactor", ",", "client", ")", "client_thimble", "=", "make_thimble", "(", "wrapper", ",", "_blocking_client_methods", ")", "def", "_Lock", "(", "path", ",", "identifier", "=", "None", ")", ":", "\"\"\"Return a wrapped :class:`kazoo.recipe.lock.Lock` for this client.\"\"\"", "lock", "=", "client", ".", "Lock", "(", "path", ",", "identifier", ")", "return", "Thimble", "(", "reactor", ",", "pool", ",", "lock", ",", "_blocking_lock_methods", ")", "client_thimble", ".", "Lock", "=", "_Lock", "client_thimble", ".", "SetPartitioner", "=", "partial", "(", "_SetPartitionerWrapper", ",", "reactor", ",", "pool", ",", "client", ")", "# Expose these so e.g. recipes can access them from the kzclient", "client", ".", "reactor", "=", "reactor", "client", ".", "pool", "=", "pool", "client", ".", "kazoo_client", "=", "client", "return", "client_thimble" ]
Prepare node for catalog endpoint
def prepare_node ( data ) : if not data : return None , { } if isinstance ( data , str ) : return data , { } # from /v1/health/service/<service> if all ( field in data for field in ( "Node" , "Service" , "Checks" ) ) : return data [ "Node" ] [ "Node" ] , data [ "Node" ] result = { } if "ID" in data : result [ "Node" ] = data [ "ID" ] for k in ( "Datacenter" , "Node" , "Address" , "TaggedAddresses" , "Service" , "Check" , "Checks" ) : if k in data : result [ k ] = data [ k ] if list ( result ) == [ "Node" ] : return result [ "Node" ] , { } return result . get ( "Node" ) , result
11,277
https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/util.py#L3-L46
[ "def", "assign", "(", "A", ",", "attr", ",", "B", ",", "lock", "=", "False", ")", ":", "class", "NoAttr", "(", "object", ")", ":", "pass", "context", "=", "threading", ".", "Lock", "if", "lock", "else", "null_context", "with", "context", "(", ")", ":", "if", "not", "hasattr", "(", "A", ",", "attr", ")", ":", "tmp", "=", "NoAttr", "else", ":", "tmp", "=", "getattr", "(", "A", ",", "attr", ")", "setattr", "(", "A", ",", "attr", ",", "B", ")", "try", ":", "yield", "B", "finally", ":", "if", "tmp", "is", "NoAttr", ":", "delattr", "(", "A", ",", "attr", ")", "else", ":", "setattr", "(", "A", ",", "attr", ",", "tmp", ")" ]
Prepare service for catalog endpoint
def prepare_service ( data ) : if not data : return None , { } if isinstance ( data , str ) : return data , { } # from /v1/health/service/<service> if all ( field in data for field in ( "Node" , "Service" , "Checks" ) ) : return data [ "Service" ] [ "ID" ] , data [ "Service" ] # from /v1/health/checks/<service> # from /v1/health/node/<node> # from /v1/health/state/<state> # from /v1/catalog/service/<service> if all ( field in data for field in ( "ServiceName" , "ServiceID" ) ) : return data [ "ServiceID" ] , { "ID" : data [ "ServiceID" ] , "Service" : data [ "ServiceName" ] , "Tags" : data . get ( "ServiceTags" ) , "Address" : data . get ( "ServiceAddress" ) , "Port" : data . get ( "ServicePort" ) , } if list ( data ) == [ "ID" ] : return data [ "ID" ] , { } result = { } if "Name" in data : result [ "Service" ] = data [ "Name" ] for k in ( "Service" , "ID" , "Tags" , "Address" , "Port" ) : if k in data : result [ k ] = data [ k ] return result . get ( "ID" ) , result
11,278
https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/util.py#L49-L125
[ "def", "accept", "(", "self", ")", ":", "filetypes", "=", "'PNG Files (*.png);;JPG Files (*.jpg);;All Files (*.*)'", "filename", "=", "QFileDialog", ".", "getSaveFileName", "(", "None", ",", "'Save Snapshot'", ",", "self", ".", "filepath", "(", ")", ",", "filetypes", ")", "if", "type", "(", "filename", ")", "==", "tuple", ":", "filename", "=", "filename", "[", "0", "]", "filename", "=", "nativestring", "(", "filename", ")", "if", "not", "filename", ":", "self", ".", "reject", "(", ")", "else", ":", "self", ".", "setFilepath", "(", "filename", ")", "self", ".", "save", "(", ")" ]
Prepare check for catalog endpoint
def prepare_check ( data ) : if not data : return None , { } if isinstance ( data , str ) : return data , { } result = { } if "ID" in data : result [ "CheckID" ] = data [ "ID" ] for k in ( "Node" , "CheckID" , "Name" , "Notes" , "Status" , "ServiceID" ) : if k in data : result [ k ] = data [ k ] if list ( result ) == [ "CheckID" ] : return result [ "CheckID" ] , { } return result . get ( "CheckID" ) , result
11,279
https://github.com/johnnoone/aioconsul/blob/02f7a529d7dc2e49bed942111067aa5faf320e90/aioconsul/client/util.py#L128-L150
[ "def", "accept", "(", "self", ")", ":", "filetypes", "=", "'PNG Files (*.png);;JPG Files (*.jpg);;All Files (*.*)'", "filename", "=", "QFileDialog", ".", "getSaveFileName", "(", "None", ",", "'Save Snapshot'", ",", "self", ".", "filepath", "(", ")", ",", "filetypes", ")", "if", "type", "(", "filename", ")", "==", "tuple", ":", "filename", "=", "filename", "[", "0", "]", "filename", "=", "nativestring", "(", "filename", ")", "if", "not", "filename", ":", "self", ".", "reject", "(", ")", "else", ":", "self", ".", "setFilepath", "(", "filename", ")", "self", ".", "save", "(", ")" ]
all options set to default
def optimize_no ( self ) : self . optimization = 0 self . relax = False self . gc_sections = False self . ffunction_sections = False self . fdata_sections = False self . fno_inline_small_functions = False
11,280
https://github.com/ponty/pyavrutils/blob/7a396a25b3ac076ede07b5cd5cbd416ebb578a28/pyavrutils/avrgcc.py#L76-L84
[ "def", "analyzePython", "(", "code_text", ")", ":", "code", ",", "comment", ",", "docstr", "=", "0", ",", "0", ",", "0", "p1", "=", "r\"\"\"(?<=%s)[\\s\\S]*?(?=%s)\"\"\"", "%", "(", "'\"\"\"'", ",", "'\"\"\"'", ")", "p2", "=", "r\"\"\"(?<=%s)[\\s\\S]*?(?=%s)\"\"\"", "%", "(", "\"'''\"", ",", "\"'''\"", ")", "# count docstr", "for", "pattern", "in", "[", "p1", ",", "p2", "]", ":", "for", "res", "in", "re", ".", "findall", "(", "pattern", ",", "code_text", ")", "[", ":", ":", "2", "]", ":", "lines", "=", "[", "i", ".", "strip", "(", ")", "for", "i", "in", "res", ".", "split", "(", "\"\\n\"", ")", "if", "i", ".", "strip", "(", ")", "]", "docstr", "+=", "len", "(", "lines", ")", "# count comment line and code", "lines", "=", "[", "i", ".", "strip", "(", ")", "for", "i", "in", "code_text", ".", "split", "(", "\"\\n\"", ")", "if", "i", ".", "strip", "(", ")", "]", "for", "line", "in", "lines", ":", "if", "line", ".", "startswith", "(", "\"#\"", ")", ":", "comment", "+=", "1", "else", ":", "code", "+=", "1", "purecode", "=", "code", "-", "docstr", "# pure code = code - docstr", "return", "code", ",", "comment", ",", "docstr", ",", "purecode" ]
Initializes the plugins for this resource manager .
def init ( self ) : # import any compiled resource modules if not self . _initialized : self . _initialized = True wrap = projexui . qt . QT_WRAPPER . lower ( ) ignore = lambda x : not x . split ( '.' ) [ - 1 ] . startswith ( wrap ) projex . importmodules ( self . plugins ( ) , ignore = ignore )
11,281
https://github.com/bitesofcode/projexui/blob/f18a73bec84df90b034ca69b9deea118dbedfc4d/projexui/xresourcemanager.py#L244-L253
[ "def", "setOverlayTransformOverlayRelative", "(", "self", ",", "ulOverlayHandle", ",", "ulOverlayHandleParent", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformOverlayRelative", "pmatParentOverlayToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "ulOverlayHandleParent", ",", "byref", "(", "pmatParentOverlayToOverlayTransform", ")", ")", "return", "result", ",", "pmatParentOverlayToOverlayTransform" ]
Return list of policies zipped with their respective data type
def policies ( self ) : policies = [ self . _pb . integer_policy , self . _pb . float_policy , self . _pb . string_policy , self . _pb . bool_policy ] key_types = [ "integer" , "float" , "string" , "bool" ] return zip ( key_types , policies )
11,282
https://github.com/intelsdi-x/snap-plugin-lib-py/blob/8da5d00ac5f9d2b48a7239563ac7788209891ca4/snap_plugin/v1/config_policy.py#L140-L145
[ "def", "_digitize_lons", "(", "lons", ",", "lon_bins", ")", ":", "if", "cross_idl", "(", "lon_bins", "[", "0", "]", ",", "lon_bins", "[", "-", "1", "]", ")", ":", "idx", "=", "numpy", ".", "zeros_like", "(", "lons", ",", "dtype", "=", "numpy", ".", "int", ")", "for", "i_lon", "in", "range", "(", "len", "(", "lon_bins", ")", "-", "1", ")", ":", "extents", "=", "get_longitudinal_extent", "(", "lons", ",", "lon_bins", "[", "i_lon", "+", "1", "]", ")", "lon_idx", "=", "extents", ">", "0", "if", "i_lon", "!=", "0", ":", "extents", "=", "get_longitudinal_extent", "(", "lon_bins", "[", "i_lon", "]", ",", "lons", ")", "lon_idx", "&=", "extents", ">=", "0", "idx", "[", "lon_idx", "]", "=", "i_lon", "return", "numpy", ".", "array", "(", "idx", ")", "else", ":", "return", "numpy", ".", "digitize", "(", "lons", ",", "lon_bins", ")", "-", "1" ]
Add measurement data to the submission buffer for eventual writing to InfluxDB .
def add_measurement ( measurement ) : global _buffer_size if not _enabled : LOGGER . debug ( 'Discarding measurement for %s while not enabled' , measurement . database ) return if _stopping : LOGGER . warning ( 'Discarding measurement for %s while stopping' , measurement . database ) return if _buffer_size > _max_buffer_size : LOGGER . warning ( 'Discarding measurement due to buffer size limit' ) return if not measurement . fields : raise ValueError ( 'Measurement does not contain a field' ) if measurement . database not in _measurements : _measurements [ measurement . database ] = [ ] value = measurement . marshall ( ) _measurements [ measurement . database ] . append ( value ) # Ensure that len(measurements) < _trigger_size are written if not _timeout : if ( _batch_future and _batch_future . done ( ) ) or not _batch_future : _start_timeout ( ) # Check to see if the batch should be triggered _buffer_size = _pending_measurements ( ) if _buffer_size >= _trigger_size : _trigger_batch_write ( )
11,283
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L159-L212
[ "def", "set_properties", "(", "self", ",", "pathobj", ",", "props", ",", "recursive", ")", ":", "url", "=", "'/'", ".", "join", "(", "[", "pathobj", ".", "drive", ",", "'api/storage'", ",", "str", "(", "pathobj", ".", "relative_to", "(", "pathobj", ".", "drive", ")", ")", ".", "strip", "(", "'/'", ")", "]", ")", "params", "=", "{", "'properties'", ":", "encode_properties", "(", "props", ")", "}", "if", "not", "recursive", ":", "params", "[", "'recursive'", "]", "=", "'0'", "text", ",", "code", "=", "self", ".", "rest_put", "(", "url", ",", "params", "=", "params", ",", "auth", "=", "pathobj", ".", "auth", ",", "verify", "=", "pathobj", ".", "verify", ",", "cert", "=", "pathobj", ".", "cert", ")", "if", "code", "==", "404", "and", "\"Unable to find item\"", "in", "text", ":", "raise", "OSError", "(", "2", ",", "\"No such file or directory: '%s'\"", "%", "url", ")", "if", "code", "!=", "204", ":", "raise", "RuntimeError", "(", "text", ")" ]
Flush all pending measurements to InfluxDB . This will ensure that all measurements that are in the buffer for any database are written . If the requests fail it will continue to try and submit the metrics until they are successfully written .
def flush ( ) : flush_future = concurrent . Future ( ) if _batch_future and not _batch_future . done ( ) : LOGGER . debug ( 'Flush waiting on incomplete _batch_future' ) _flush_wait ( flush_future , _batch_future ) else : LOGGER . info ( 'Flushing buffer with %i measurements to InfluxDB' , _pending_measurements ( ) ) _flush_wait ( flush_future , _write_measurements ( ) ) return flush_future
11,284
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L215-L232
[ "def", "configure_splitevaluator", "(", "self", ")", ":", "if", "self", ".", "classification", ":", "speval", "=", "javabridge", ".", "make_instance", "(", "\"weka/experiment/ClassifierSplitEvaluator\"", ",", "\"()V\"", ")", "else", ":", "speval", "=", "javabridge", ".", "make_instance", "(", "\"weka/experiment/RegressionSplitEvaluator\"", ",", "\"()V\"", ")", "classifier", "=", "javabridge", ".", "call", "(", "speval", ",", "\"getClassifier\"", ",", "\"()Lweka/classifiers/Classifier;\"", ")", "return", "speval", ",", "classifier" ]
Override the default authentication credentials obtained from the environment variable configuration .
def set_auth_credentials ( username , password ) : global _credentials , _dirty LOGGER . debug ( 'Setting authentication credentials' ) _credentials = username , password _dirty = True
11,285
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L334-L346
[ "def", "_register_rules", "(", "self", ",", "rule_index", ")", ":", "registered", "=", "set", "(", ")", "for", "output_type", ",", "rules", "in", "rule_index", ".", "rules", ".", "items", "(", ")", ":", "for", "rule", "in", "rules", ":", "key", "=", "(", "output_type", ",", "rule", ")", "if", "key", "in", "registered", ":", "continue", "registered", ".", "add", "(", "key", ")", "if", "type", "(", "rule", ")", "is", "TaskRule", ":", "self", ".", "_register_task", "(", "output_type", ",", "rule", ",", "rule_index", ".", "union_rules", ")", "else", ":", "raise", "ValueError", "(", "'Unexpected Rule type: {}'", ".", "format", "(", "rule", ")", ")" ]
Override the default base URL value created from the environment variable configuration .
def set_base_url ( url ) : global _base_url , _dirty LOGGER . debug ( 'Setting base URL to %s' , url ) _base_url = url _dirty = True
11,286
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L349-L360
[ "def", "map", "(", "self", ",", "fn", ":", "Callable", "[", "[", "Any", "]", ",", "Any", "]", ")", "->", "'Reader'", ":", "def", "_compose", "(", "x", ":", "Any", ")", "->", "Any", ":", "try", ":", "ret", "=", "fn", "(", "self", ".", "run", "(", "x", ")", ")", "except", "TypeError", ":", "ret", "=", "partial", "(", "fn", ",", "self", ".", "run", "(", "x", ")", ")", "return", "ret", "return", "Reader", "(", "_compose", ")" ]
Set the maximum number of simultaneous batch submission that can execute in parallel .
def set_max_clients ( limit ) : global _dirty , _max_clients LOGGER . debug ( 'Setting maximum client limit to %i' , limit ) _dirty = True _max_clients = limit
11,287
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L390-L401
[ "def", "from_codes_and_metadata", "(", "cls", ",", "codes", ",", "categories", ",", "reverse_categories", ",", "missing_value", ")", ":", "ret", "=", "codes", ".", "view", "(", "type", "=", "cls", ",", "dtype", "=", "np", ".", "void", ")", "ret", ".", "_categories", "=", "categories", "ret", ".", "_reverse_categories", "=", "reverse_categories", "ret", ".", "_missing_value", "=", "missing_value", "return", "ret" ]
Set the probability that a batch will be submitted to the InfluxDB server . This should be a value that is greater than or equal to 0 and less than or equal to 1 . 0 . A value of 0 . 25 would represent a probability of 25% that a batch would be written to InfluxDB .
def set_sample_probability ( probability ) : global _sample_probability if not 0.0 <= probability <= 1.0 : raise ValueError ( 'Invalid probability value' ) LOGGER . debug ( 'Setting sample probability to %.2f' , probability ) _sample_probability = float ( probability )
11,288
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L404-L420
[ "def", "RecurseKeys", "(", "self", ")", ":", "yield", "self", "for", "subkey", "in", "self", ".", "GetSubkeys", "(", ")", ":", "for", "key", "in", "subkey", ".", "RecurseKeys", "(", ")", ":", "yield", "key" ]
Override the maximum duration to wait for submitting measurements to InfluxDB .
def set_timeout ( milliseconds ) : global _timeout , _timeout_interval LOGGER . debug ( 'Setting batch wait timeout to %i ms' , milliseconds ) _timeout_interval = milliseconds _maybe_stop_timeout ( ) _timeout = ioloop . IOLoop . current ( ) . add_timeout ( milliseconds , _on_timeout )
11,289
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L423-L435
[ "def", "list_key_pairs", "(", "profile", ",", "*", "*", "libcloud_kwargs", ")", ":", "conn", "=", "_get_driver", "(", "profile", "=", "profile", ")", "libcloud_kwargs", "=", "salt", ".", "utils", ".", "args", ".", "clean_kwargs", "(", "*", "*", "libcloud_kwargs", ")", "keys", "=", "conn", ".", "list_key_pairs", "(", "*", "*", "libcloud_kwargs", ")", "ret", "=", "[", "]", "for", "key", "in", "keys", ":", "ret", ".", "append", "(", "_simple_key_pair", "(", "key", ")", ")", "return", "ret" ]
Create the HTTP client with authentication credentials if required .
def _create_http_client ( ) : global _http_client defaults = { 'user_agent' : USER_AGENT } auth_username , auth_password = _credentials if auth_username and auth_password : defaults [ 'auth_username' ] = auth_username defaults [ 'auth_password' ] = auth_password _http_client = httpclient . AsyncHTTPClient ( force_instance = True , defaults = defaults , max_clients = _max_clients )
11,290
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L472-L484
[ "def", "get_category", "(", "filename", ")", ":", "return", "'/'", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "filename", ")", ".", "split", "(", "os", ".", "sep", ")", ")" ]
Pause briefly allowing any pending metric writes to complete before shutting down .
def _flush_wait ( flush_future , write_future ) : if write_future . done ( ) : if not _pending_measurements ( ) : flush_future . set_result ( True ) return else : write_future = _write_measurements ( ) ioloop . IOLoop . current ( ) . add_timeout ( ioloop . IOLoop . current ( ) . time ( ) + 0.25 , _flush_wait , flush_future , write_future )
11,291
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L487-L505
[ "def", "get_rng", "(", "obj", "=", "None", ")", ":", "seed", "=", "(", "id", "(", "obj", ")", "+", "os", ".", "getpid", "(", ")", "+", "int", "(", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "\"%Y%m%d%H%M%S%f\"", ")", ")", ")", "%", "4294967295", "if", "_RNG_SEED", "is", "not", "None", ":", "seed", "=", "_RNG_SEED", "return", "np", ".", "random", ".", "RandomState", "(", "seed", ")" ]
Waits for all futures to be completed . If the futures are not done wait 100ms and then invoke itself via the ioloop and check again . If they are done set a result on wait_future indicating the list of futures are done .
def _futures_wait ( wait_future , futures ) : global _buffer_size , _writing remaining = [ ] for ( future , batch , database , measurements ) in futures : # If the future hasn't completed, add it to the remaining stack if not future . done ( ) : remaining . append ( ( future , batch , database , measurements ) ) continue # Get the result of the HTTP request, processing any errors error = future . exception ( ) if isinstance ( error , httpclient . HTTPError ) : if error . code == 400 : _write_error_batch ( batch , database , measurements ) elif error . code >= 500 : _on_5xx_error ( batch , error , database , measurements ) else : LOGGER . error ( 'Error submitting %s batch %s to InfluxDB (%s): ' '%s' , database , batch , error . code , error . response . body ) elif isinstance ( error , ( TimeoutError , OSError , socket . error , select . error , ssl . socket_error ) ) : _on_5xx_error ( batch , error , database , measurements ) # If there are futures that remain, try again in 100ms. if remaining : return ioloop . IOLoop . current ( ) . add_timeout ( ioloop . IOLoop . current ( ) . time ( ) + 0.1 , _futures_wait , wait_future , remaining ) else : # Start the next timeout or trigger the next batch _buffer_size = _pending_measurements ( ) LOGGER . debug ( 'Batch submitted, %i measurements remain' , _buffer_size ) if _buffer_size >= _trigger_size : ioloop . IOLoop . current ( ) . add_callback ( _trigger_batch_write ) elif _buffer_size : _start_timeout ( ) _writing = False wait_future . set_result ( True )
11,292
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L508-L558
[ "def", "bind_to_storage_buffer", "(", "self", ",", "binding", "=", "0", ",", "*", ",", "offset", "=", "0", ",", "size", "=", "-", "1", ")", "->", "None", ":", "self", ".", "mglo", ".", "bind_to_storage_buffer", "(", "binding", ",", "offset", ",", "size", ")" ]
If there is a pending timeout remove it from the IOLoop and set the _timeout global to None .
def _maybe_stop_timeout ( ) : global _timeout if _timeout is not None : LOGGER . debug ( 'Removing the pending timeout (%r)' , _timeout ) ioloop . IOLoop . current ( ) . remove_timeout ( _timeout ) _timeout = None
11,293
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L561-L571
[ "def", "update_repodata", "(", "self", ",", "channels", "=", "None", ")", ":", "norm_channels", "=", "self", ".", "conda_get_condarc_channels", "(", "channels", "=", "channels", ",", "normalize", "=", "True", ")", "repodata_urls", "=", "self", ".", "_set_repo_urls_from_channels", "(", "norm_channels", ")", "self", ".", "_check_repos", "(", "repodata_urls", ")" ]
Check the buffer size and issue a warning if it s too large and a warning has not been issued for more than 60 seconds .
def _maybe_warn_about_buffer_size ( ) : global _last_warning if not _last_warning : _last_warning = time . time ( ) if _buffer_size > _warn_threshold and ( time . time ( ) - _last_warning ) > 120 : LOGGER . warning ( 'InfluxDB measurement buffer has %i entries' , _buffer_size )
11,294
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L574-L586
[ "def", "_unbind_topics", "(", "self", ",", "topics", ")", ":", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "status", ")", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "tracing", ")", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "streaming", ")", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "response", ")" ]
Handle a batch submission error logging the problem and adding the measurements back to the stack .
def _on_5xx_error ( batch , error , database , measurements ) : LOGGER . info ( 'Appending %s measurements to stack due to batch %s %r' , database , batch , error ) _measurements [ database ] = _measurements [ database ] + measurements
11,295
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L589-L601
[ "def", "_set_properties", "(", "self", ")", ":", "self", ".", "SetTitle", "(", "_", "(", "\"About pyspread\"", ")", ")", "label", "=", "_", "(", "\"pyspread {version}\\nCopyright Martin Manns\"", ")", "label", "=", "label", ".", "format", "(", "version", "=", "VERSION", ")", "self", ".", "about_label", ".", "SetLabel", "(", "label", ")" ]
Invoked periodically to ensure that metrics that have been collected are submitted to InfluxDB .
def _on_timeout ( ) : global _buffer_size LOGGER . debug ( 'No metrics submitted in the last %.2f seconds' , _timeout_interval / 1000.0 ) _buffer_size = _pending_measurements ( ) if _buffer_size : return _trigger_batch_write ( ) _start_timeout ( )
11,296
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L604-L618
[ "def", "create", "(", "self", ",", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "return", "super", "(", "ImageMemberManager", ",", "self", ")", ".", "create", "(", "name", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "if", "e", ".", "http_status", "==", "403", ":", "raise", "exc", ".", "UnsharableImage", "(", "\"You cannot share a public image.\"", ")", "else", ":", "raise" ]
Determine if a batch should be processed and if not pop off all of the pending metrics for that batch .
def _sample_batch ( ) : if _sample_probability == 1.0 or random . random ( ) < _sample_probability : return True # Pop off all the metrics for the batch for database in _measurements : _measurements [ database ] = _measurements [ database ] [ _max_batch_size : ] return False
11,297
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L631-L644
[ "def", "repo_def_matches_reality", "(", "juicer_def", ",", "pulp_def", ")", ":", "return", "juicer", ".", "common", ".", "Repo", ".", "RepoDiff", "(", "juicer_repo", "=", "juicer_def", ",", "pulp_repo", "=", "pulp_def", ")" ]
Stop a running timeout if it s there then create a new one .
def _start_timeout ( ) : global _timeout LOGGER . debug ( 'Adding a new timeout in %i ms' , _timeout_interval ) _maybe_stop_timeout ( ) _timeout = ioloop . IOLoop . current ( ) . add_timeout ( ioloop . IOLoop . current ( ) . time ( ) + _timeout_interval / 1000.0 , _on_timeout )
11,298
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L647-L655
[ "def", "create_dimension_groups", "(", "dimension_positions", ")", ":", "dimension_groups", "=", "[", "]", "for", "dim_group_label", ",", "position", "in", "dimension_positions", ":", "dim_group", "=", "DimensionGroup", "(", "dim_group_label", ",", "position", ")", "for", "dim_label", "in", "nmrstarlib", ".", "RESONANCE_CLASSES", "[", "dim_group_label", "]", ":", "dim_group", ".", "dimensions", ".", "append", "(", "Dimension", "(", "dim_label", ",", "position", ")", ")", "dimension_groups", ".", "append", "(", "dim_group", ")", "return", "dimension_groups" ]
Stop a timeout if it s running and then write the measurements .
def _trigger_batch_write ( ) : global _batch_future LOGGER . debug ( 'Batch write triggered (%r/%r)' , _buffer_size , _trigger_size ) _maybe_stop_timeout ( ) _maybe_warn_about_buffer_size ( ) _batch_future = _write_measurements ( ) return _batch_future
11,299
https://github.com/sprockets/sprockets-influxdb/blob/cce73481b8f26b02e65e3f9914a9a22eceff3063/sprockets_influxdb.py#L658-L667
[ "def", "set_key_value", "(", "self", ",", "value", ",", "store_type", "=", "PUBLIC_KEY_STORE_TYPE_BASE64", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "if", "PUBLIC_KEY_STORE_TYPE_HEX", "in", "value", ":", "self", ".", "set_key_value", "(", "value", "[", "PUBLIC_KEY_STORE_TYPE_HEX", "]", ",", "PUBLIC_KEY_STORE_TYPE_HEX", ")", "elif", "PUBLIC_KEY_STORE_TYPE_BASE64", "in", "value", ":", "self", ".", "set_key_value", "(", "value", "[", "PUBLIC_KEY_STORE_TYPE_BASE64", "]", ",", "PUBLIC_KEY_STORE_TYPE_BASE64", ")", "elif", "PUBLIC_KEY_STORE_TYPE_BASE85", "in", "value", ":", "self", ".", "set_key_value", "(", "value", "[", "PUBLIC_KEY_STORE_TYPE_BASE85", "]", ",", "PUBLIC_KEY_STORE_TYPE_BASE85", ")", "elif", "PUBLIC_KEY_STORE_TYPE_JWK", "in", "value", ":", "self", ".", "set_key_value", "(", "value", "[", "PUBLIC_KEY_STORE_TYPE_JWK", "]", ",", "PUBLIC_KEY_STORE_TYPE_JWK", ")", "elif", "PUBLIC_KEY_STORE_TYPE_PEM", "in", "value", ":", "self", ".", "set_key_value", "(", "value", "[", "PUBLIC_KEY_STORE_TYPE_PEM", "]", ",", "PUBLIC_KEY_STORE_TYPE_PEM", ")", "else", ":", "self", ".", "_value", "=", "value", "self", ".", "_store_type", "=", "store_type" ]