query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Delete any symlinks present at the root of a directory .
def remove_existing_links(root_dir):
    """Remove every symlink found directly at the root of ``root_dir``.

    Regular files and subdirectories are left untouched; only entries for
    which ``os.path.islink`` is true are deleted.
    """
    log = logging.getLogger(__name__)
    for entry in os.listdir(root_dir):
        entry_path = os.path.join(root_dir, entry)
        if not os.path.islink(entry_path):
            continue
        log.debug('Deleting existing symlink {0}'.format(entry_path))
        os.remove(entry_path)
11,900
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/stackdocs/build.py#L434-L455
[ "def", "create_experiment", "(", "args", ")", ":", "config_file_name", "=", "''", ".", "join", "(", "random", ".", "sample", "(", "string", ".", "ascii_letters", "+", "string", ".", "digits", ",", "8", ")", ")", "nni_config", "=", "Config", "(", "config_file_name", ")", "config_path", "=", "os", ".", "path", ".", "abspath", "(", "args", ".", "config", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "config_path", ")", ":", "print_error", "(", "'Please set correct config path!'", ")", "exit", "(", "1", ")", "experiment_config", "=", "get_yml_content", "(", "config_path", ")", "validate_all_content", "(", "experiment_config", ",", "config_path", ")", "nni_config", ".", "set_config", "(", "'experimentConfig'", ",", "experiment_config", ")", "launch_experiment", "(", "args", ",", "experiment_config", ",", "'new'", ",", "config_file_name", ")", "nni_config", ".", "set_config", "(", "'restServerPort'", ",", "args", ".", "port", ")" ]
Render a data model diagram
def render_diagram(out_base):
    """Render a data model diagram to ``<out_base>.dot`` and ``<out_base>.png``.

    Requires a ``DOT_EXECUTABLE`` entry in the project config pointing at the
    Graphviz ``dot`` binary; raises RuntimeError/IOError when it is missing.
    """
    import codecs
    import subprocess
    import sadisplay
    # generate class descriptions
    desc = sadisplay.describe(list(model_registry.values()),
                              show_methods=False,
                              show_properties=True,
                              show_indexes=True,
                              )
    # write description in DOT format
    with codecs.open(out_base + '.dot', 'w', encoding='utf-8') as f:
        f.write(sadisplay.dot(desc))
    # check existence of DOT_EXECUTABLE variable and file
    if not hasattr(config, 'DOT_EXECUTABLE'):
        raise RuntimeError("Please configure the 'DOT_EXECUTABLE' variable in your 'project_config.py'")
    if not os.path.exists(config.DOT_EXECUTABLE):
        raise IOError("Could not find file pointed to by 'DOT_EXECUTABLE': " + str(config.DOT_EXECUTABLE))
    # render to image using DOT
    # noinspection PyUnresolvedReferences
    subprocess.check_call([config.DOT_EXECUTABLE,
                           '-T', 'png',
                           '-o', out_base + '.png',
                           out_base + '.dot'])
11,901
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/orm/base.py#L187-L228
[ "def", "_verify_options", "(", "options", ")", ":", "# sanity check all vals used for bitwise operations later", "bitwise_args", "=", "[", "(", "'level'", ",", "options", "[", "'level'", "]", ")", ",", "(", "'facility'", ",", "options", "[", "'facility'", "]", ")", "]", "bitwise_args", ".", "extend", "(", "[", "(", "'option'", ",", "x", ")", "for", "x", "in", "options", "[", "'options'", "]", "]", ")", "for", "opt_name", ",", "opt", "in", "bitwise_args", ":", "if", "not", "hasattr", "(", "syslog", ",", "opt", ")", ":", "log", ".", "error", "(", "'syslog has no attribute %s'", ",", "opt", ")", "return", "False", "if", "not", "isinstance", "(", "getattr", "(", "syslog", ",", "opt", ")", ",", "int", ")", ":", "log", ".", "error", "(", "'%s is not a valid syslog %s'", ",", "opt", ",", "opt_name", ")", "return", "False", "# Sanity check tag", "if", "'tag'", "in", "options", ":", "if", "not", "isinstance", "(", "options", "[", "'tag'", "]", ",", "six", ".", "string_types", ")", ":", "log", ".", "error", "(", "'tag must be a string'", ")", "return", "False", "if", "len", "(", "options", "[", "'tag'", "]", ")", ">", "32", ":", "log", ".", "error", "(", "'tag size is limited to 32 characters'", ")", "return", "False", "return", "True" ]
Get the current max value of the id column .
def get_max_id(cls, session):
    """Return the current max value of the ``id`` column for this model.

    Walks the single-level inheritance chain to find the class directly
    derived from ``Base`` (which owns the shared id column), then queries
    its maximum.  Returns 0 when no rows exist.
    """
    # sqlalchemy allows only one level of inheritance, so just check this class and all its bases
    id_base = None
    for c in [cls] + list(cls.__bases__):
        for base_class in c.__bases__:
            if base_class.__name__ == 'Base':
                if id_base is None:
                    # we found our base class for determining the ID
                    id_base = c
                else:
                    raise RuntimeError("Multiple base object classes for class " + cls.__name__)
    # this should never happen
    if id_base is None:
        raise RuntimeError("Error searching for base class of " + cls.__name__)
    # get its max ID
    max_id = session.query(func.max(id_base.id)).scalar()
    # if no object is present, None is returned
    if max_id is None:
        max_id = 0
    return max_id
11,902
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/orm/base.py#L112-L146
[ "def", "dir", "(", "self", ",", "path", "=", "'/'", ",", "slash", "=", "True", ",", "bus", "=", "False", ",", "timeout", "=", "0", ")", ":", "if", "slash", ":", "msg", "=", "MSG_DIRALLSLASH", "else", ":", "msg", "=", "MSG_DIRALL", "if", "bus", ":", "flags", "=", "self", ".", "flags", "|", "FLG_BUS_RET", "else", ":", "flags", "=", "self", ".", "flags", "&", "~", "FLG_BUS_RET", "ret", ",", "data", "=", "self", ".", "sendmess", "(", "msg", ",", "str2bytez", "(", "path", ")", ",", "flags", ",", "timeout", "=", "timeout", ")", "if", "ret", "<", "0", ":", "raise", "OwnetError", "(", "-", "ret", ",", "self", ".", "errmess", "[", "-", "ret", "]", ",", "path", ")", "if", "data", ":", "return", "bytes2str", "(", "data", ")", ".", "split", "(", "','", ")", "else", ":", "return", "[", "]" ]
Truncate the value of a string field to the field's max length.
def truncate_to_field_length(self, field, value):
    """Truncate *value* so it fits the mapped string column *field*.

    The limit is read from the column type of the SQLAlchemy descriptor on
    this class.  Falsy values are returned unchanged.
    """
    limit = getattr(self.__class__, field).prop.columns[0].type.length
    if not value or len(value) <= limit:
        return value
    return value[:limit]
11,903
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/orm/base.py#L148-L181
[ "def", "public_timeline", "(", "self", ",", "delegate", ",", "params", "=", "{", "}", ",", "extra_args", "=", "None", ")", ":", "return", "self", ".", "__get", "(", "'/statuses/public_timeline.atom'", ",", "delegate", ",", "params", ",", "extra_args", "=", "extra_args", ")" ]
Tell Tkinter to process the untwisted event loop. It registers the update handler just once.
def extern(obj, timeout=200):
    """Tell Tkinter to process the untwisted event loop.

    The update hook is registered only once; subsequent calls are no-ops.
    """
    global installed
    if installed:
        return
    install_hook(obj, timeout)
    installed = True
11,904
https://github.com/untwisted/untwisted/blob/8a8d9c8a8d0f3452d5de67cd760297bb5759f637/untwisted/tkinter.py#L15-L25
[ "def", "get_storage", "(", "path", "=", "None", ",", "options", "=", "None", ")", ":", "path", "=", "path", "or", "settings", ".", "STORAGE", "options", "=", "options", "or", "settings", ".", "STORAGE_OPTIONS", "if", "not", "path", ":", "raise", "ImproperlyConfigured", "(", "'You must specify a storage class using '", "'DBBACKUP_STORAGE settings.'", ")", "return", "Storage", "(", "path", ",", "*", "*", "options", ")" ]
Tell untwisted to process an extern event loop .
def intern(obj, timeout):
    """Tell untwisted to process an extern event loop.

    Sets the gear timeout and queues *obj* into the gear's pool.
    """
    gear = core.gear
    gear.timeout = timeout
    gear.pool.append(obj)
11,905
https://github.com/untwisted/untwisted/blob/8a8d9c8a8d0f3452d5de67cd760297bb5759f637/untwisted/tkinter.py#L27-L34
[ "def", "clear_composition", "(", "self", ")", ":", "# Implemented from template for osid.resource.ResourceForm.clear_avatar_template", "if", "(", "self", ".", "get_composition_metadata", "(", ")", ".", "is_read_only", "(", ")", "or", "self", ".", "get_composition_metadata", "(", ")", ".", "is_required", "(", ")", ")", ":", "raise", "errors", ".", "NoAccess", "(", ")", "self", ".", "_my_map", "[", "'compositionId'", "]", "=", "self", ".", "_composition_default" ]
Construct a reference node for a JIRA ticket .
def _make_ticket_node(ticket_id, config, options=None):
    """Construct a docutils reference node for a JIRA ticket.

    The link target is built from ``config.jira_uri_template``.
    """
    opts = {} if not options else options
    uri = config.jira_uri_template.format(ticket=ticket_id)
    return nodes.reference(text=ticket_id, refuri=uri, **opts)
11,906
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxext/jira.py#L16-L21
[ "def", "_cryptodome_encrypt", "(", "cipher_factory", ",", "plaintext", ",", "key", ",", "iv", ")", ":", "encryptor", "=", "cipher_factory", "(", "key", ",", "iv", ")", "return", "encryptor", ".", "encrypt", "(", "plaintext", ")" ]
Make a separator for a prose-like list: ', ' between items, except ', and ' after the second-to-last item and nothing after the last item.
def _oxford_comma_separator ( i , length ) : if length == 1 : return None elif length < 3 and i == 0 : return ' and ' elif i < length - 2 : return ', ' elif i == length - 2 : return ', and ' else : return None
11,907
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxext/jira.py#L37-L50
[ "def", "legacy_signature", "(", "*", "*", "kwargs_mapping", ")", ":", "def", "signature_decorator", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "redirected_kwargs", "=", "{", "kwargs_mapping", "[", "k", "]", "if", "k", "in", "kwargs_mapping", "else", "k", ":", "v", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", "}", "return", "f", "(", "*", "args", ",", "*", "*", "redirected_kwargs", ")", "return", "wrapper", "return", "signature_decorator" ]
Sphinx role for referencing a JIRA ticket .
def jira_role(name, rawtext, text, lineno, inliner, options=None, content=None, oxford_comma=True):
    """Sphinx role for referencing one or more JIRA tickets.

    ``text`` is a comma-separated list of ticket ids; each becomes a
    reference node, with prose separators interleaved between them.
    Returns ``(node_list, messages)`` per the docutils role protocol.
    """
    options = options or {}
    content = content or []
    config = inliner.document.settings.env.app.config
    ticket_ids = [part.strip() for part in utils.unescape(text).split(',')]
    count = len(ticket_ids)
    sep_factory = _oxford_comma_separator if oxford_comma else _comma_separator
    node_list = []
    for index, ticket_id in enumerate(ticket_ids):
        node_list.append(_make_ticket_node(ticket_id, config, options=options))
        sep_text = sep_factory(index, count)
        if sep_text is not None:
            node_list.append(nodes.raw(text=sep_text, format='html'))
    return node_list, []
11,908
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxext/jira.py#L53-L83
[ "def", "update_experiment", "(", ")", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "if", "not", "experiment_dict", ":", "return", "None", "for", "key", "in", "experiment_dict", ".", "keys", "(", ")", ":", "if", "isinstance", "(", "experiment_dict", "[", "key", "]", ",", "dict", ")", ":", "if", "experiment_dict", "[", "key", "]", ".", "get", "(", "'status'", ")", "!=", "'STOPPED'", ":", "nni_config", "=", "Config", "(", "experiment_dict", "[", "key", "]", "[", "'fileName'", "]", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "not", "detect_process", "(", "rest_pid", ")", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "'STOPPED'", ")", "continue", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "startTime", ",", "endTime", "=", "get_experiment_time", "(", "rest_port", ")", "if", "startTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'startTime'", ",", "startTime", ")", "if", "endTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'endTime'", ",", "endTime", ")", "status", "=", "get_experiment_status", "(", "rest_port", ")", "if", "status", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "status", ")" ]
Sphinx role for referencing a JIRA ticket with ticket numbers enclosed in braces . Useful for changelogs .
def jira_bracket_role(name, rawtext, text, lineno, inliner, options=None, content=None,
                      open_symbol='[', close_symbol=']'):
    """Sphinx role for referencing a JIRA ticket with ticket numbers
    enclosed in braces.  Useful for changelogs.

    Fix: forward the caller-supplied ``content`` to ``jira_role`` instead
    of silently discarding it.
    """
    node_list, _ = jira_role(name, rawtext, text, lineno, inliner,
                             options=options, content=content,
                             oxford_comma=False)
    node_list = (nodes.raw(text=open_symbol, format='html')
                 + node_list
                 + nodes.raw(text=close_symbol, format='html'))
    return node_list, []
11,909
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxext/jira.py#L86-L102
[ "def", "feed", "(", "self", ",", "data_len", ",", "feed_time", "=", "None", ")", ":", "self", ".", "_bytes_transferred", "+=", "data_len", "self", ".", "_collected_bytes_transferred", "+=", "data_len", "time_now", "=", "feed_time", "or", "time", ".", "time", "(", ")", "time_diff", "=", "time_now", "-", "self", ".", "_last_feed_time", "if", "time_diff", "<", "self", ".", "_sample_min_time", ":", "return", "self", ".", "_last_feed_time", "=", "time", ".", "time", "(", ")", "if", "data_len", "==", "0", "and", "time_diff", ">=", "self", ".", "_stall_time", ":", "self", ".", "_stalled", "=", "True", "return", "self", ".", "_samples", ".", "append", "(", "(", "time_diff", ",", "self", ".", "_collected_bytes_transferred", ")", ")", "self", ".", "_collected_bytes_transferred", "=", "0" ]
Sphinx role for referencing a JIRA ticket with ticket numbers enclosed in parentheses . Useful for changelogs .
def jira_parens_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    """Sphinx role for referencing a JIRA ticket with ticket numbers
    enclosed in parentheses.  Useful for changelogs.

    Fix: forward the caller-supplied ``options`` and ``content`` instead of
    always passing ``None`` (the original discarded both parameters).
    """
    return jira_bracket_role(name, rawtext, text, lineno, inliner,
                             options=options, content=content,
                             open_symbol='(', close_symbol=')')
11,910
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxext/jira.py#L105-L118
[ "def", "getDefaultApplicationForMimeType", "(", "self", ",", "pchMimeType", ",", "pchAppKeyBuffer", ",", "unAppKeyBufferLen", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getDefaultApplicationForMimeType", "result", "=", "fn", "(", "pchMimeType", ",", "pchAppKeyBuffer", ",", "unAppKeyBufferLen", ")", "return", "result" ]
Call API method: generate the request, parse the response, and process errors. method : str — API method URL for the request; contains parameters. params : dict — parameters for the method URL.
def _method_call ( self , method , category , * * kwargs ) : session = requests . Session ( ) try : response = session . get ( "http://" + self . _api_address ) except requests . exceptions . ConnectionError : raise FantasyDataError ( 'Error: Cannot connect to the FantasyData API' ) method = method . format ( format = self . _response_format , * * kwargs ) request_url = "/v3/{game_type}/{category}/{format}/{method}?{get_params}" . format ( game_type = self . game_type , category = category , format = self . _response_format , method = method , get_params = self . _get_params ) response = session . get ( self . _api_schema + self . _api_address + request_url , headers = self . _headers ) result = response . json ( ) if isinstance ( result , dict ) and response . status_code : if response . status_code == 401 : raise FantasyDataError ( 'Error: Invalid API key' ) elif response . status_code == 200 : # for NBA everything is ok here. pass else : raise FantasyDataError ( 'Error: Failed to get response' ) return result
11,911
https://github.com/ffcalculator/fantasydata-python/blob/af90cac1e80d8356cffaa80621ee513201f6c661/fantasy_data/FantasyData.py#L38-L70
[ "def", "focusInEvent", "(", "self", ",", "event", ")", ":", "self", ".", "_changedRecord", "=", "-", "1", "super", "(", "XOrbRecordBox", ",", "self", ")", ".", "focusInEvent", "(", "event", ")" ]
Projected Player Game Stats by Player
def get_projected_player_game_stats_by_player(self, season, week, player_id):
    """Projected Player Game Stats by Player."""
    return self._method_call(
        "PlayerGameProjectionStatsByPlayerID/{season}/{week}/{player_id}",
        "projections",
        season=season, week=week, player_id=player_id)
11,912
https://github.com/ffcalculator/fantasydata-python/blob/af90cac1e80d8356cffaa80621ee513201f6c661/fantasy_data/FantasyData.py#L161-L166
[ "def", "_parse", "(", "self", ",", "params", ")", ":", "if", "not", "isinstance", "(", "params", ",", "dict", ")", ":", "return", "None", ",", "None", "if", "len", "(", "params", ")", "==", "0", ":", "return", "None", ",", "None", "selectors", "=", "list", "(", ")", "modifiers", "=", "list", "(", ")", "for", "k", "in", "params", ".", "keys", "(", ")", ":", "if", "k", "in", "LOGICAL_OPERATORS", ":", "selectors", ".", "append", "(", "self", ".", "_logical", "(", "k", ",", "params", "[", "k", "]", ")", ")", "elif", "k", "in", "QUERY_MODIFIERS", ":", "modifiers", ".", "append", "(", "self", ".", "_modifier", "(", "k", ",", "params", "[", "k", "]", ")", ")", "else", ":", "if", "k", "==", "'_id'", ":", "selectors", ".", "append", "(", "\"rowid%s\"", "%", "(", "self", ".", "_value_wrapper", "(", "params", "[", "k", "]", ")", ")", ")", "else", ":", "selectors", ".", "append", "(", "\"%s%s\"", "%", "(", "k", ",", "self", ".", "_value_wrapper", "(", "params", "[", "k", "]", ")", ")", ")", "_selectors", "=", "' AND '", ".", "join", "(", "selectors", ")", ".", "strip", "(", ")", "_modifiers", "=", "' '", ".", "join", "(", "modifiers", ")", ".", "strip", "(", ")", "return", "_selectors", ",", "_modifiers" ]
Projected Player Game Stats by Team
def get_projected_player_game_stats_by_team(self, season, week, team_id):
    """Projected Player Game Stats by Team."""
    return self._method_call(
        "PlayerGameProjectionStatsByTeam/{season}/{week}/{team_id}",
        "projections",
        season=season, week=week, team_id=team_id)
11,913
https://github.com/ffcalculator/fantasydata-python/blob/af90cac1e80d8356cffaa80621ee513201f6c661/fantasy_data/FantasyData.py#L168-L173
[ "def", "_parse", "(", "self", ",", "params", ")", ":", "if", "not", "isinstance", "(", "params", ",", "dict", ")", ":", "return", "None", ",", "None", "if", "len", "(", "params", ")", "==", "0", ":", "return", "None", ",", "None", "selectors", "=", "list", "(", ")", "modifiers", "=", "list", "(", ")", "for", "k", "in", "params", ".", "keys", "(", ")", ":", "if", "k", "in", "LOGICAL_OPERATORS", ":", "selectors", ".", "append", "(", "self", ".", "_logical", "(", "k", ",", "params", "[", "k", "]", ")", ")", "elif", "k", "in", "QUERY_MODIFIERS", ":", "modifiers", ".", "append", "(", "self", ".", "_modifier", "(", "k", ",", "params", "[", "k", "]", ")", ")", "else", ":", "if", "k", "==", "'_id'", ":", "selectors", ".", "append", "(", "\"rowid%s\"", "%", "(", "self", ".", "_value_wrapper", "(", "params", "[", "k", "]", ")", ")", ")", "else", ":", "selectors", ".", "append", "(", "\"%s%s\"", "%", "(", "k", ",", "self", ".", "_value_wrapper", "(", "params", "[", "k", "]", ")", ")", ")", "_selectors", "=", "' AND '", ".", "join", "(", "selectors", ")", ".", "strip", "(", ")", "_modifiers", "=", "' '", ".", "join", "(", "modifiers", ")", ".", "strip", "(", ")", "return", "_selectors", ",", "_modifiers" ]
Projected Player Game Stats by Week
def get_projected_player_game_stats_by_week(self, season, week):
    """Projected Player Game Stats by Week."""
    return self._method_call(
        "PlayerGameProjectionStatsByWeek/{season}/{week}",
        "projections",
        season=season, week=week)
11,914
https://github.com/ffcalculator/fantasydata-python/blob/af90cac1e80d8356cffaa80621ee513201f6c661/fantasy_data/FantasyData.py#L175-L180
[ "def", "andify", "(", "list_of_strings", ")", ":", "result", "=", "', '", ".", "join", "(", "list_of_strings", ")", "comma_index", "=", "result", ".", "rfind", "(", "','", ")", "if", "comma_index", ">", "-", "1", ":", "result", "=", "result", "[", ":", "comma_index", "]", "+", "' and'", "+", "result", "[", "comma_index", "+", "1", ":", "]", "return", "result" ]
Projected Fantasy Defense Game Stats by Week
def get_projected_fantasy_defense_game_stats_by_week(self, season, week):
    """Projected Fantasy Defense Game Stats by Week."""
    return self._method_call(
        "FantasyDefenseProjectionsByGame/{season}/{week}",
        "projections",
        season=season, week=week)
11,915
https://github.com/ffcalculator/fantasydata-python/blob/af90cac1e80d8356cffaa80621ee513201f6c661/fantasy_data/FantasyData.py#L182-L187
[ "def", "andify", "(", "list_of_strings", ")", ":", "result", "=", "', '", ".", "join", "(", "list_of_strings", ")", "comma_index", "=", "result", ".", "rfind", "(", "','", ")", "if", "comma_index", ">", "-", "1", ":", "result", "=", "result", "[", ":", "comma_index", "]", "+", "' and'", "+", "result", "[", "comma_index", "+", "1", ":", "]", "return", "result" ]
Injuries by week
def get_injuries(self, season, week):
    """Injuries by week."""
    return self._method_call(
        "Injuries/{season}/{week}",
        "stats",
        season=season, week=week)
11,916
https://github.com/ffcalculator/fantasydata-python/blob/af90cac1e80d8356cffaa80621ee513201f6c661/fantasy_data/FantasyData.py#L232-L237
[ "def", "setup_temp_logger", "(", "log_level", "=", "'error'", ")", ":", "if", "is_temp_logging_configured", "(", ")", ":", "logging", ".", "getLogger", "(", "__name__", ")", ".", "warning", "(", "'Temporary logging is already configured'", ")", "return", "if", "log_level", "is", "None", ":", "log_level", "=", "'warning'", "level", "=", "LOG_LEVELS", ".", "get", "(", "log_level", ".", "lower", "(", ")", ",", "logging", ".", "ERROR", ")", "handler", "=", "None", "for", "handler", "in", "logging", ".", "root", ".", "handlers", ":", "if", "handler", "in", "(", "LOGGING_NULL_HANDLER", ",", "LOGGING_STORE_HANDLER", ")", ":", "continue", "if", "not", "hasattr", "(", "handler", ",", "'stream'", ")", ":", "# Not a stream handler, continue", "continue", "if", "handler", ".", "stream", "is", "sys", ".", "stderr", ":", "# There's already a logging handler outputting to sys.stderr", "break", "else", ":", "handler", "=", "LOGGING_TEMP_HANDLER", "handler", ".", "setLevel", "(", "level", ")", "# Set the default temporary console formatter config", "formatter", "=", "logging", ".", "Formatter", "(", "'[%(levelname)-8s] %(message)s'", ",", "datefmt", "=", "'%H:%M:%S'", ")", "handler", ".", "setFormatter", "(", "formatter", ")", "logging", ".", "root", ".", "addHandler", "(", "handler", ")", "# Sync the null logging handler messages with the temporary handler", "if", "LOGGING_NULL_HANDLER", "is", "not", "None", ":", "LOGGING_NULL_HANDLER", ".", "sync_with_handlers", "(", "[", "handler", "]", ")", "else", ":", "logging", ".", "getLogger", "(", "__name__", ")", ".", "debug", "(", "'LOGGING_NULL_HANDLER is already None, can\\'t sync messages '", "'with it'", ")", "# Remove the temporary null logging handler", "__remove_null_logging_handler", "(", ")", "global", "__TEMP_LOGGING_CONFIGURED", "__TEMP_LOGGING_CONFIGURED", "=", "True" ]
Injuries by week and team
def get_injuries_by_team(self, season, week, team_id):
    """Injuries by week and team."""
    return self._method_call(
        "Injuries/{season}/{week}/{team_id}",
        "stats",
        season=season, week=week, team_id=team_id)
11,917
https://github.com/ffcalculator/fantasydata-python/blob/af90cac1e80d8356cffaa80621ee513201f6c661/fantasy_data/FantasyData.py#L239-L244
[ "def", "profile_args", "(", "_args", ")", ":", "# TODO: clean this up in a way that works for both py2/3", "if", "(", "_args", ".", "get", "(", "'app'", ",", "{", "}", ")", ".", "get", "(", "'optional'", ")", "is", "not", "None", "or", "_args", ".", "get", "(", "'app'", ",", "{", "}", ")", ".", "get", "(", "'required'", ")", "is", "not", "None", ")", ":", "# detect v3 schema", "app_args_optional", "=", "_args", ".", "get", "(", "'app'", ",", "{", "}", ")", ".", "get", "(", "'optional'", ",", "{", "}", ")", "app_args_required", "=", "_args", ".", "get", "(", "'app'", ",", "{", "}", ")", ".", "get", "(", "'required'", ",", "{", "}", ")", "default_args", "=", "_args", ".", "get", "(", "'default'", ",", "{", "}", ")", "_args", "=", "{", "}", "_args", ".", "update", "(", "app_args_optional", ")", "_args", ".", "update", "(", "app_args_required", ")", "_args", ".", "update", "(", "default_args", ")", "elif", "_args", ".", "get", "(", "'app'", ")", "is", "not", "None", "and", "_args", ".", "get", "(", "'default'", ")", "is", "not", "None", ":", "# detect v2 schema", "app_args", "=", "_args", ".", "get", "(", "'app'", ",", "{", "}", ")", "default_args", "=", "_args", ".", "get", "(", "'default'", ",", "{", "}", ")", "_args", "=", "{", "}", "_args", ".", "update", "(", "app_args", ")", "_args", ".", "update", "(", "default_args", ")", "return", "_args" ]
Box score by week and team
def get_box_score_by_team(self, season, week, team_id):
    """Box score by week and team."""
    return self._method_call(
        "BoxScoreV3/{season}/{week}/{team_id}",
        "stats",
        season=season, week=week, team_id=team_id)
11,918
https://github.com/ffcalculator/fantasydata-python/blob/af90cac1e80d8356cffaa80621ee513201f6c661/fantasy_data/FantasyData.py#L246-L251
[ "def", "__get_file", "(", "self", ",", "file", ")", ":", "file_object", "=", "None", "if", "file", "[", "'name'", "]", "in", "request", ".", "files", ":", "file_object", "=", "request", ".", "files", "[", "file", "[", "'name'", "]", "]", "clean_filename", "=", "secure_filename", "(", "file_object", ".", "filename", ")", "if", "clean_filename", "==", "''", ":", "return", "file_object", "if", "file_object", "and", "self", ".", "__allowed_extension", "(", "clean_filename", ",", "file", "[", "'extensions'", "]", ")", ":", "return", "file_object", "elif", "file", "[", "'name'", "]", "not", "in", "request", ".", "files", "and", "file", "[", "'required'", "]", ":", "return", "file_object", "return", "file_object" ]
Authenticates against the LDAP directory and returns the corresponding User object if successful . Returns None on failure .
def authenticate(self, password):
    """Authenticate against the LDAP directory.

    Returns the corresponding User object on success, or None on failure.
    Unexpected (non-LDAP) exceptions are logged and re-raised.
    """
    user = None
    try:
        self._authenticate_user_dn(password)
        self._check_requirements()
        self._get_or_create_user()
        user = self._user
    except self.AuthenticationFailed as e:
        logger.debug(u"Authentication failed for %s: %s" % (self._username, e))
    except ldap.LDAPError as e:
        # give ldap_error signal receivers a chance to handle the error;
        # only log when nobody responded
        results = ldap_error.send(self.backend.__class__,
                                  context='authenticate',
                                  exception=e)
        if len(results) == 0:
            logger.warning(u"Caught LDAPError while authenticating %s: %s",
                           self._username, pprint.pformat(e))
    except Exception:
        logger.exception(u"Caught Exception while authenticating %s",
                         self._username)
        raise
    return user
11,919
https://github.com/DheerendraRathor/django-auth-ldap-ng/blob/4d2458bd90c4539353c5bfd5ea793c1e59780ee8/django_auth_ldap/backend.py#L325-L351
[ "def", "sync_readmes", "(", ")", ":", "print", "(", "\"syncing README\"", ")", "with", "open", "(", "\"README.md\"", ",", "'r'", ")", "as", "reader", ":", "file_text", "=", "reader", ".", "read", "(", ")", "with", "open", "(", "\"README\"", ",", "'w'", ")", "as", "writer", ":", "writer", ".", "write", "(", "file_text", ")" ]
If allowed by the configuration this returns the set of permissions defined by the user s LDAP group memberships .
def get_group_permissions(self):
    """Return the set of permissions defined by the user's LDAP group
    memberships, if allowed by the configuration.

    Computed once and cached on the instance; returns an empty set when
    FIND_GROUP_PERMS is disabled or loading fails.
    """
    if self._group_permissions is None:
        self._group_permissions = set()
        if self.settings.FIND_GROUP_PERMS:
            try:
                self._load_group_permissions()
            except ldap.LDAPError as e:
                # give ldap_error signal receivers a chance to handle the
                # error; only log when nobody responded
                results = ldap_error.send(self.backend.__class__,
                                          context='get_group_permissions',
                                          exception=e)
                if len(results) == 0:
                    logger.warning("Caught LDAPError loading group permissions: %s",
                                   pprint.pformat(e))
    return self._group_permissions
11,920
https://github.com/DheerendraRathor/django-auth-ldap-ng/blob/4d2458bd90c4539353c5bfd5ea793c1e59780ee8/django_auth_ldap/backend.py#L353-L372
[ "def", "_read_body_by_chunk", "(", "self", ",", "response", ",", "file", ",", "raw", "=", "False", ")", ":", "reader", "=", "ChunkedTransferReader", "(", "self", ".", "_connection", ")", "file_is_async", "=", "hasattr", "(", "file", ",", "'drain'", ")", "while", "True", ":", "chunk_size", ",", "data", "=", "yield", "from", "reader", ".", "read_chunk_header", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "data", ")", "if", "raw", ":", "file", ".", "write", "(", "data", ")", "if", "not", "chunk_size", ":", "break", "while", "True", ":", "content", ",", "data", "=", "yield", "from", "reader", ".", "read_chunk_body", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "data", ")", "if", "not", "content", ":", "if", "raw", ":", "file", ".", "write", "(", "data", ")", "break", "content", "=", "self", ".", "_decompress_data", "(", "content", ")", "if", "file", ":", "file", ".", "write", "(", "content", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "content", "=", "self", ".", "_flush_decompressor", "(", ")", "if", "file", ":", "file", ".", "write", "(", "content", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "trailer_data", "=", "yield", "from", "reader", ".", "read_trailer", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "trailer_data", ")", "if", "file", "and", "raw", ":", "file", ".", "write", "(", "trailer_data", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "response", ".", "fields", ".", "parse", "(", "trailer_data", ")" ]
Populates our User object with information from the LDAP directory .
def _populate_user ( self ) : self . _populate_user_from_attributes ( ) self . _populate_user_from_group_memberships ( ) self . _populate_user_from_dn_regex ( ) self . _populate_user_from_dn_regex_negation ( )
11,921
https://github.com/DheerendraRathor/django-auth-ldap-ng/blob/4d2458bd90c4539353c5bfd5ea793c1e59780ee8/django_auth_ldap/backend.py#L584-L591
[ "def", "upload", "(", "self", ",", "params", "=", "{", "}", ")", ":", "if", "self", ".", "upload_token", "is", "not", "None", ":", "# resume upload", "status", "=", "self", ".", "check", "(", ")", "if", "status", "[", "'status'", "]", "!=", "4", ":", "return", "self", ".", "commit", "(", ")", "else", ":", "self", ".", "new_slice", "(", ")", "while", "self", ".", "slice_task_id", "!=", "0", ":", "self", ".", "upload_slice", "(", ")", "return", "self", ".", "commit", "(", ")", "else", ":", "# new upload", "self", ".", "create", "(", "self", ".", "prepare_video_params", "(", "*", "*", "params", ")", ")", "self", ".", "create_file", "(", ")", "self", ".", "new_slice", "(", ")", "while", "self", ".", "slice_task_id", "!=", "0", ":", "self", ".", "upload_slice", "(", ")", "return", "self", ".", "commit", "(", ")" ]
Populates a User profile object with fields from the LDAP directory .
def _populate_and_save_user_profile(self):
    """Populate a User profile object with fields from the LDAP directory
    and save it when anything changed.

    Silently skips users without a profile (ObjectDoesNotExist) and an
    invalid AUTH_PROFILE_MODULE setting (LookupError).
    """
    try:
        # AUTH_PROFILE_MODULE is "app_label.ModelName"; the two-target
        # unpack requires exactly one dot
        app_label, class_name = django.conf.settings.AUTH_PROFILE_MODULE.split('.')
        profile_model = apps.get_model(app_label, class_name)
        profile, created = profile_model.objects.get_or_create(user=self._user)
        save_profile = False
        logger.debug("Populating Django user profile for %s",
                     get_user_username(self._user))
        save_profile = self._populate_profile_from_attributes(profile) or save_profile
        save_profile = self._populate_profile_flags_from_dn_regex(profile) or save_profile
        save_profile = self._populate_profile_from_group_memberships(profile) or save_profile
        # any signal receiver forces a save
        signal_responses = populate_user_profile.send(self.backend.__class__,
                                                      profile=profile,
                                                      ldap_user=self)
        if len(signal_responses) > 0:
            save_profile = True
        if save_profile:
            profile.save()
    except ObjectDoesNotExist:
        logger.debug("Django user %s does not have a profile to populate",
                     get_user_username(self._user))
    except LookupError:
        logger.debug('User Profile model defined in settings.AUTH_PROFILE_MODULE is invalid')
11,922
https://github.com/DheerendraRathor/django-auth-ldap-ng/blob/4d2458bd90c4539353c5bfd5ea793c1e59780ee8/django_auth_ldap/backend.py#L633-L658
[ "def", "cancelHistoricalData", "(", "self", ",", "contracts", "=", "None", ")", ":", "if", "contracts", "==", "None", ":", "contracts", "=", "list", "(", "self", ".", "contracts", ".", "values", "(", ")", ")", "elif", "not", "isinstance", "(", "contracts", ",", "list", ")", ":", "contracts", "=", "[", "contracts", "]", "for", "contract", "in", "contracts", ":", "# tickerId = self.tickerId(contract.m_symbol)", "tickerId", "=", "self", ".", "tickerId", "(", "self", ".", "contractString", "(", "contract", ")", ")", "self", ".", "ibConn", ".", "cancelHistoricalData", "(", "tickerId", "=", "tickerId", ")" ]
Populate the given profile object from AUTH_LDAP_PROFILE_ATTR_MAP . Returns True if the profile was modified .
def _populate_profile_from_attributes(self, profile):
    """Copy LDAP attribute values onto *profile* per
    AUTH_LDAP_PROFILE_ATTR_MAP.

    Returns True if at least one field was set.
    """
    modified = False
    for field_name, ldap_attr in self.settings.PROFILE_ATTR_MAP.items():
        try:
            # self.attrs maps each LDAP attribute name to a list of
            # values; the first value wins.
            setattr(profile, field_name, self.attrs[ldap_attr][0])
        except Exception:
            logger.warning("%s does not have a value for the attribute %s", self.dn, ldap_attr)
        else:
            modified = True
    return modified
11,923
https://github.com/DheerendraRathor/django-auth-ldap-ng/blob/4d2458bd90c4539353c5bfd5ea793c1e59780ee8/django_auth_ldap/backend.py#L660-L675
[ "def", "_get_data_segments", "(", "channels", ",", "start", ",", "end", ",", "connection", ")", ":", "allsegs", "=", "io_nds2", ".", "get_availability", "(", "channels", ",", "start", ",", "end", ",", "connection", "=", "connection", ")", "return", "allsegs", ".", "intersection", "(", "allsegs", ".", "keys", "(", ")", ")" ]
Populate the given profile object from AUTH_LDAP_PROFILE_FLAGS_BY_GROUP . Returns True if the profile was modified .
def _populate_profile_from_group_memberships(self, profile):
    """Set boolean flags on *profile* per AUTH_LDAP_PROFILE_FLAGS_BY_GROUP.

    Returns True if any flag was set.
    """
    modified = False
    for flag_name, group_dns in self.settings.PROFILE_FLAGS_BY_GROUP.items():
        # A bare string is shorthand for a one-element list of group DNs.
        if isinstance(group_dns, six.string_types):
            group_dns = [group_dns]
        is_member = any(self._get_groups().is_member_of(dn) for dn in group_dns)
        setattr(profile, flag_name, is_member)
        modified = True
    return modified
11,924
https://github.com/DheerendraRathor/django-auth-ldap-ng/blob/4d2458bd90c4539353c5bfd5ea793c1e59780ee8/django_auth_ldap/backend.py#L692-L706
[ "def", "_get_data_segments", "(", "channels", ",", "start", ",", "end", ",", "connection", ")", ":", "allsegs", "=", "io_nds2", ".", "get_availability", "(", "channels", ",", "start", ",", "end", ",", "connection", "=", "connection", ")", "return", "allsegs", ".", "intersection", "(", "allsegs", ".", "keys", "(", ")", ")" ]
Populates self . _group_permissions based on LDAP group membership and Django group permissions .
def _load_group_permissions(self):
    """Cache the set of "app_label.codename" permission strings granted to
    the user's LDAP groups in ``self._group_permissions``.
    """
    names = self._get_groups().get_group_names()
    # order_by() with no arguments clears any default ordering so the
    # database can skip the sort.
    rows = (
        Permission.objects.filter(group__name__in=names)
        .values_list('content_type__app_label', 'codename')
        .order_by()
    )
    self._group_permissions = {"%s.%s" % (app_label, codename)
                               for app_label, codename in rows}
11,925
https://github.com/DheerendraRathor/django-auth-ldap-ng/blob/4d2458bd90c4539353c5bfd5ea793c1e59780ee8/django_auth_ldap/backend.py#L729-L740
[ "def", "stop_experiment", "(", "args", ")", ":", "experiment_id_list", "=", "parse_ids", "(", "args", ")", "if", "experiment_id_list", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "for", "experiment_id", "in", "experiment_id_list", ":", "print_normal", "(", "'Stoping experiment %s'", "%", "experiment_id", ")", "nni_config", "=", "Config", "(", "experiment_dict", "[", "experiment_id", "]", "[", "'fileName'", "]", ")", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "rest_pid", ":", "kill_command", "(", "rest_pid", ")", "tensorboard_pid_list", "=", "nni_config", ".", "get_config", "(", "'tensorboardPidList'", ")", "if", "tensorboard_pid_list", ":", "for", "tensorboard_pid", "in", "tensorboard_pid_list", ":", "try", ":", "kill_command", "(", "tensorboard_pid", ")", "except", "Exception", "as", "exception", ":", "print_error", "(", "exception", ")", "nni_config", ".", "set_config", "(", "'tensorboardPidList'", ",", "[", "]", ")", "print_normal", "(", "'Stop experiment success!'", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'status'", ",", "'STOPPED'", ")", "time_now", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'endTime'", ",", "str", "(", "time_now", ")", ")" ]
Method to get the task id from the JSON response .
def get_task_id(self):
    """Return the "task_id" field of the JSON response, or None if absent."""
    # Log the request before handing back the value.
    self.logger.info("%s\t%s" % (self.request_method, self.request_url))
    return self.json_response.get("task_id", None)
11,926
https://github.com/gmdzy2010/dingtalk_sdk_gmdzy2010/blob/b06cb1f78f89be9554dcb6101af8bc72718a9ecd/dingtalk_sdk_gmdzy2010/message_request.py#L24-L28
[ "def", "_init_client_authentication", "(", "self", ",", "client_certchain_file", ":", "Optional", "[", "str", "]", ",", "client_key_file", ":", "Optional", "[", "str", "]", ",", "client_key_type", ":", "OpenSslFileTypeEnum", ",", "client_key_password", ":", "str", ",", "ignore_client_authentication_requests", ":", "bool", ")", "->", "None", ":", "if", "client_certchain_file", "is", "not", "None", "and", "client_key_file", "is", "not", "None", ":", "self", ".", "_use_private_key", "(", "client_certchain_file", ",", "client_key_file", ",", "client_key_type", ",", "client_key_password", ")", "if", "ignore_client_authentication_requests", ":", "if", "client_certchain_file", ":", "raise", "ValueError", "(", "'Cannot enable both client_certchain_file and ignore_client_authentication_requests'", ")", "self", ".", "_ssl_ctx", ".", "set_client_cert_cb_NULL", "(", ")" ]
Method to get messageId of group created .
def get_message_id(self):
    """Return the "messageId" field of the JSON response, or None if absent."""
    # Log the request before handing back the value.
    self.logger.info("%s\t%s" % (self.request_method, self.request_url))
    return self.json_response.get("messageId", None)
11,927
https://github.com/gmdzy2010/dingtalk_sdk_gmdzy2010/blob/b06cb1f78f89be9554dcb6101af8bc72718a9ecd/dingtalk_sdk_gmdzy2010/message_request.py#L168-L172
[ "def", "countRandomBitFrequencies", "(", "numTerms", "=", "100000", ",", "percentSparsity", "=", "0.01", ")", ":", "# Accumulate counts by inplace-adding sparse matrices", "counts", "=", "SparseMatrix", "(", ")", "size", "=", "128", "*", "128", "counts", ".", "resize", "(", "1", ",", "size", ")", "# Pre-allocate buffer sparse matrix", "sparseBitmap", "=", "SparseMatrix", "(", ")", "sparseBitmap", ".", "resize", "(", "1", ",", "size", ")", "random", ".", "seed", "(", "42", ")", "# Accumulate counts for each bit for each word", "numWords", "=", "0", "for", "term", "in", "xrange", "(", "numTerms", ")", ":", "bitmap", "=", "random", ".", "sample", "(", "xrange", "(", "size", ")", ",", "int", "(", "size", "*", "percentSparsity", ")", ")", "bitmap", ".", "sort", "(", ")", "sparseBitmap", ".", "setRowFromSparse", "(", "0", ",", "bitmap", ",", "[", "1", "]", "*", "len", "(", "bitmap", ")", ")", "counts", "+=", "sparseBitmap", "numWords", "+=", "1", "# Compute normalized version of counts as a separate matrix", "frequencies", "=", "SparseMatrix", "(", ")", "frequencies", ".", "resize", "(", "1", ",", "size", ")", "frequencies", ".", "copy", "(", "counts", ")", "frequencies", ".", "divide", "(", "float", "(", "numWords", ")", ")", "# Wrap up by printing some statistics and then saving the normalized version", "printFrequencyStatistics", "(", "counts", ",", "frequencies", ",", "numWords", ",", "size", ")", "frequencyFilename", "=", "\"bit_frequencies_random.pkl\"", "print", "\"Saving frequency matrix in\"", ",", "frequencyFilename", "with", "open", "(", "frequencyFilename", ",", "\"wb\"", ")", "as", "frequencyPickleFile", ":", "pickle", ".", "dump", "(", "frequencies", ",", "frequencyPickleFile", ")", "return", "counts" ]
Wraps a function to run in a given directory .
def change_dir(directory):
    """Decorator factory: run the wrapped function with *directory* as the
    current working directory.

    Fixes two defects in the original implementation: the wrapped
    function's return value was dropped, and the original working
    directory was not restored if the function raised.
    """
    def cd_decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            org_path = os.getcwd()
            os.chdir(directory)
            try:
                return func(*args, **kwargs)
            finally:
                # Always restore the previous cwd, even on error.
                os.chdir(org_path)
        return wrapper
    return cd_decorator
11,928
https://github.com/brutus/wdiffhtml/blob/e97b524a7945f7a626e33ec141343120c524d9fa/tasks/__init__.py#L27-L40
[ "def", "add_annotation", "(", "self", ",", "entity", ",", "annotation", ",", "value", ")", ":", "url", "=", "self", ".", "base_path", "+", "'term/add-annotation'", "data", "=", "{", "'tid'", ":", "entity", "[", "'id'", "]", ",", "'annotation_tid'", ":", "annotation", "[", "'id'", "]", ",", "'value'", ":", "value", ",", "'term_version'", ":", "entity", "[", "'version'", "]", ",", "'annotation_term_version'", ":", "annotation", "[", "'version'", "]", "}", "return", "self", ".", "post", "(", "url", ",", "data", ")" ]
Builds CSS from SASS .
def build_css(minimize=True):
    """Compile SASS to CSS; output is compressed unless *minimize* is false."""
    print('Build CSS')
    if minimize:
        style = 'compressed'
    else:
        style = 'nested'
    run(CMD_SASS.format(style=style))
11,929
https://github.com/brutus/wdiffhtml/blob/e97b524a7945f7a626e33ec141343120c524d9fa/tasks/__init__.py#L45-L54
[ "def", "delete_network", "(", "self", ",", "tenant_name", ",", "network", ")", ":", "seg_id", "=", "network", ".", "segmentation_id", "network_info", "=", "{", "'organizationName'", ":", "tenant_name", ",", "'partitionName'", ":", "self", ".", "_part_name", ",", "'segmentId'", ":", "seg_id", ",", "}", "LOG", ".", "debug", "(", "\"Deleting %s network in DCNM.\"", ",", "network_info", ")", "res", "=", "self", ".", "_delete_network", "(", "network_info", ")", "if", "res", "and", "res", ".", "status_code", "in", "self", ".", "_resp_ok", ":", "LOG", ".", "debug", "(", "\"Deleted %s network in DCNM.\"", ",", "network_info", ")", "else", ":", "LOG", ".", "error", "(", "\"Failed to delete %s network in DCNM.\"", ",", "network_info", ")", "raise", "dexc", ".", "DfaClientRequestFailed", "(", "reason", "=", "res", ")" ]
Decorator : execute the wrapped function under cProfile and print profiling statistics .
def profile(func):
    """Decorator: run *func* under cProfile and print the top 20 entries
    sorted by cumulative time around each call.

    Fixes the original implementation, which never disabled the profiler
    if the wrapped function raised (leaving the global profiling hook
    installed) and which lost the wrapped function's metadata.
    """
    from functools import wraps  # local import keeps the block self-contained

    @wraps(func)
    def _f(*args, **kwargs):
        print("\n<<<---")
        pr = cProfile.Profile()
        pr.enable()
        try:
            res = func(*args, **kwargs)
        finally:
            # Always uninstall the profiling hook, even on error.
            pr.disable()
        stats = pstats.Stats(pr)
        stats.strip_dirs().sort_stats('cumtime').print_stats(20)
        print("\n--->>>")
        return res
    return _f
11,930
https://github.com/mojaie/chorus/blob/fc7fe23a0272554c67671645ab07830b315eeb1b/chorus/util/debug.py#L17-L30
[ "def", "rl_cleanspaces", "(", "x", ")", ":", "patterns", "=", "(", "# arguments for re.sub: pattern and repl", "# удаляем пробел перед знаками препинания", "(", "r' +([\\.,?!\\)]+)'", ",", "r'\\1'", ")", ",", "# добавляем пробел после знака препинания, если только за ним нет другого", "(", "r'([\\.,?!\\)]+)([^\\.!,?\\)]+)'", ",", "r'\\1 \\2'", ")", ",", "# убираем пробел после открывающей скобки", "(", "r'(\\S+)\\s*(\\()\\s*(\\S+)'", ",", "r'\\1 (\\3'", ")", ",", ")", "# удаляем двойные, начальные и конечные пробелы", "return", "os", ".", "linesep", ".", "join", "(", "' '", ".", "join", "(", "part", "for", "part", "in", "line", ".", "split", "(", "' '", ")", "if", "part", ")", "for", "line", "in", "_sub_patterns", "(", "patterns", ",", "x", ")", ".", "split", "(", "os", ".", "linesep", ")", ")" ]
Returns approximate memory size
def total_size(obj, verbose=False):
    """Recursively estimate the memory footprint of *obj* in bytes.

    Containers (tuple/list/set/frozenset/deque), dicts, and objects with
    a ``__dict__`` are traversed; every distinct object is counted once,
    so shared references and cycles are handled. With *verbose*, each
    visited object's size is printed.
    """
    visited = set()

    def _accumulate(item):
        # Count each object once, keyed by identity.
        if id(item) in visited:
            return 0
        visited.add(id(item))
        size = sys.getsizeof(item, default=0)
        if verbose:
            print(size, type(item), repr(item))
        if isinstance(item, (tuple, list, set, frozenset, deque)):
            size += sum(_accumulate(member) for member in iter(item))
        elif isinstance(item, dict):
            size += sum(_accumulate(part)
                        for part in chain.from_iterable(item.items()))
        elif "__dict__" in dir(item):
            size += sum(_accumulate(part)
                        for part in chain.from_iterable(item.__dict__.items()))
        return size

    return _accumulate(obj)
11,931
https://github.com/mojaie/chorus/blob/fc7fe23a0272554c67671645ab07830b315eeb1b/chorus/util/debug.py#L33-L52
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "cols", "=", "salt", ".", "utils", ".", "args", ".", "shlex_split", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ")", ".", "strip", "(", ")", ")", "if", "repo", "not", "in", "cols", ":", "output", ".", "append", "(", "line", ")", "else", ":", "output", ".", "append", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "repostr", "+", "'\\n'", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ",", "'w'", ")", "as", "fhandle", ":", "fhandle", ".", "writelines", "(", "output", ")" ]
Decorator : make stdout silent while the wrapped function runs .
def mute(func):
    """Decorator: suppress stdout while *func* runs.

    Fixes the original implementation, which leaked the devnull file
    handle and left ``sys.stdout`` pointing at devnull if the wrapped
    function raised.
    """
    def _f(*args, **kwargs):
        with open(os.devnull, 'w') as devnull:
            sys.stdout = devnull
            try:
                return func(*args, **kwargs)
            finally:
                # Restore stdout even on error.
                sys.stdout = sys.__stdout__
    return _f
11,932
https://github.com/mojaie/chorus/blob/fc7fe23a0272554c67671645ab07830b315eeb1b/chorus/util/debug.py#L97-L107
[ "def", "cluster_types", "(", "types", ",", "max_clust", "=", "12", ")", ":", "if", "len", "(", "types", ")", "<", "max_clust", ":", "max_clust", "=", "len", "(", "types", ")", "# Do actual clustering", "cluster_dict", "=", "do_clustering", "(", "types", ",", "max_clust", ")", "cluster_ranks", "=", "rank_clusters", "(", "cluster_dict", ")", "# Create a dictionary mapping binary numbers to indices", "ranks", "=", "{", "}", "for", "key", "in", "cluster_dict", ":", "for", "typ", "in", "cluster_dict", "[", "key", "]", ":", "ranks", "[", "typ", "]", "=", "cluster_ranks", "[", "key", "]", "return", "ranks" ]
Insert HTML theme configurations .
def _insert_html_configs(c, *, project_name, short_project_name):
    """Insert HTML theme configurations into the config state *c* and
    return it. Uses the lsst-sphinx-bootstrap-theme.
    """
    # Use the lsst-sphinx-bootstrap-theme
    c['templates_path'] = ['_templates',
                           lsst_sphinx_bootstrap_theme.get_html_templates_path()]
    c['html_theme'] = 'lsst_sphinx_bootstrap_theme'
    c['html_theme_path'] = [lsst_sphinx_bootstrap_theme.get_html_theme_path()]
    # Theme options are theme-specific and customize the look and feel of a
    # theme further. For a list of options available for each theme, see the
    # documentation.
    c['html_theme_options'] = {'logotext': short_project_name}
    # The name for this set of Sphinx documents. If None, it defaults to
    # "<project> v<release> documentation".
    c['html_title'] = project_name
    # A shorter title for the navigation bar. Default is the same as
    # html_title.
    c['html_short_title'] = short_project_name
    # The name of an image file (relative to this directory) to place at the
    # top of the sidebar.
    c['html_logo'] = None
    # The name of an image file (within the static path) to use as favicon of
    # the docs. This file should be a Windows icon file (.ico) being 16x16 or
    # 32x32 pixels large.
    c['html_favicon'] = None
    # Add any paths that contain custom static files (such as style sheets)
    # here, relative to this directory. They are copied after the builtin
    # static files, so a file named "default.css" will overwrite the builtin
    # "default.css".
    if os.path.isdir('_static'):
        c['html_static_path'] = ['_static']
    else:
        # If a project does not have a _static/ directory, don't list it
        # so that there isn't a warning.
        c['html_static_path'] = []
    # Add any extra paths that contain custom files (such as robots.txt or
    # .htaccess) here, relative to this directory. These files are copied
    # directly to the root of the documentation.
    # html_extra_path = []
    # If not '', a 'Last updated on:' timestamp is inserted at every page
    # bottom, using the given strftime format.
    c['html_last_updated_fmt'] = '%b %d, %Y'
    # If true, SmartyPants will be used to convert quotes and dashes to
    # typographically correct entities.
    c['html_use_smartypants'] = True
    # If false, no module index is generated.
    c['html_domain_indices'] = False
    # If false, no index is generated.
    c['html_use_index'] = False
    # If true, the index is split into individual pages for each letter.
    c['html_split_index'] = False
    # If true, links to the reST sources are added to the pages.
    c['html_show_sourcelink'] = True
    # If true, "Created using Sphinx" is shown in the HTML footer. Default is
    # True.
    c['html_show_sphinx'] = True
    # If true, "(C) Copyright ..." is shown in the HTML footer. Default is
    # True.
    c['html_show_copyright'] = True
    # If true, an OpenSearch description file will be output, and all pages
    # will contain a <link> tag referring to it. The value of this option must
    # be the base URL from which the finished HTML is served.
    # html_use_opensearch = ''
    # This is the file name suffix for HTML files (e.g. ".xhtml").
    c['html_file_suffix'] = '.html'
    # Language to be used for generating the HTML full-text search index.
    c['html_search_language'] = 'en'
    # A dictionary with options for the search language support, empty by
    # default. Now only 'ja' uses this config value
    # html_search_options = {'type': 'default'}
    # The name of a javascript file (relative to the configuration directory)
    # that implements a search results scorer. If empty, the default will be
    # used.
    # html_search_scorer = 'scorer.js'
    return c
11,933
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxconfig/stackconf.py#L71-L167
[ "def", "configure_splitevaluator", "(", "self", ")", ":", "if", "self", ".", "classification", ":", "speval", "=", "javabridge", ".", "make_instance", "(", "\"weka/experiment/ClassifierSplitEvaluator\"", ",", "\"()V\"", ")", "else", ":", "speval", "=", "javabridge", ".", "make_instance", "(", "\"weka/experiment/RegressionSplitEvaluator\"", ",", "\"()V\"", ")", "classifier", "=", "javabridge", ".", "call", "(", "speval", ",", "\"getClassifier\"", ",", "\"()Lweka/classifiers/Classifier;\"", ")", "return", "speval", ",", "classifier" ]
Add common core Sphinx configurations to the state .
def _insert_common_sphinx_configs ( c , * , project_name ) : c [ 'project' ] = project_name # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: c [ 'source_suffix' ] = '.rst' # The encoding of source files. c [ 'source_encoding' ] = 'utf-8-sig' # The master toctree document. c [ 'master_doc' ] = 'index' # Configure figure numbering c [ 'numfig' ] = True c [ 'numfig_format' ] = { 'figure' : 'Figure %s' , 'table' : 'Table %s' , 'code-block' : 'Listing %s' } # The reST default role (used for this markup: `text`) c [ 'default_role' ] = 'obj' # This is added to the end of RST files - a good place to put substitutions # to be used globally. c [ 'rst_epilog' ] = """ .. _Astropy: http://astropy.org """ # A list of warning types to suppress arbitrary warning messages. We mean # to override directives in # astropy_helpers.sphinx.ext.autodoc_enhancements, thus need to ignore # those warning. This can be removed once the patch gets released in # upstream Sphinx (https://github.com/sphinx-doc/sphinx/pull/1843). # Suppress the warnings requires Sphinx v1.4.2 c [ 'suppress_warnings' ] = [ 'app.add_directive' , ] return c
11,934
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxconfig/stackconf.py#L170-L208
[ "def", "_is_bval_type_a", "(", "grouped_dicoms", ")", ":", "bval_tag", "=", "Tag", "(", "0x2001", ",", "0x1003", ")", "bvec_x_tag", "=", "Tag", "(", "0x2005", ",", "0x10b0", ")", "bvec_y_tag", "=", "Tag", "(", "0x2005", ",", "0x10b1", ")", "bvec_z_tag", "=", "Tag", "(", "0x2005", ",", "0x10b2", ")", "for", "group", "in", "grouped_dicoms", ":", "if", "bvec_x_tag", "in", "group", "[", "0", "]", "and", "_is_float", "(", "common", ".", "get_fl_value", "(", "group", "[", "0", "]", "[", "bvec_x_tag", "]", ")", ")", "and", "bvec_y_tag", "in", "group", "[", "0", "]", "and", "_is_float", "(", "common", ".", "get_fl_value", "(", "group", "[", "0", "]", "[", "bvec_y_tag", "]", ")", ")", "and", "bvec_z_tag", "in", "group", "[", "0", "]", "and", "_is_float", "(", "common", ".", "get_fl_value", "(", "group", "[", "0", "]", "[", "bvec_z_tag", "]", ")", ")", "and", "bval_tag", "in", "group", "[", "0", "]", "and", "_is_float", "(", "common", ".", "get_fl_value", "(", "group", "[", "0", "]", "[", "bval_tag", "]", ")", ")", "and", "common", ".", "get_fl_value", "(", "group", "[", "0", "]", "[", "bval_tag", "]", ")", "!=", "0", ":", "return", "True", "return", "False" ]
Add breathe extension configurations to the state .
def _insert_breathe_configs(c, *, project_name, doxygen_xml_dirname):
    """Add breathe extension settings to *c* when a doxygen XML directory
    is available, and return *c*.
    """
    if doxygen_xml_dirname is None:
        # Nothing to configure without generated doxygen XML.
        return c
    c['breathe_projects'] = {project_name: doxygen_xml_dirname}
    c['breathe_default_project'] = project_name
    return c
11,935
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxconfig/stackconf.py#L211-L217
[ "def", "attach", "(", "self", ",", "stdout", "=", "True", ",", "stderr", "=", "True", ",", "stream", "=", "True", ",", "logs", "=", "False", ")", ":", "try", ":", "data", "=", "parse_stream", "(", "self", ".", "client", ".", "attach", "(", "self", ".", "id", ",", "stdout", ",", "stderr", ",", "stream", ",", "logs", ")", ")", "except", "KeyboardInterrupt", ":", "logger", ".", "warning", "(", "\"service container: {0} has been interrupted. \"", "\"The container will be stopped but will not be deleted.\"", ".", "format", "(", "self", ".", "name", ")", ")", "data", "=", "None", "self", ".", "stop", "(", ")", "return", "data" ]
Add configurations related to automodapi autodoc and numpydoc to the state .
def _insert_automodapi_configs(c):
    """Add automodapi, autodoc, and numpydoc settings to *c* and return it."""
    c.update({
        # Don't show summaries of the members in each class along with the
        # class' docstring.
        'numpydoc_show_class_members': False,
        'autosummary_generate': True,
        'automodapi_toctreedirnm': 'py-api',
        'automodsumm_inherited_members': True,
        # Docstrings for classes and methods are inherited from parents.
        'autodoc_inherit_docstrings': True,
        # Class documentation should only contain the class docstring and
        # ignore the __init__ docstring, per LSST coding standards.
        'autoclass_content': "class",
        # NOTE: 'inherited-members' seemed to duplicate automodapi listings,
        # so it is left out for now (see DM-14782). Special members are
        # dunder methods.
        'autodoc_default_flags': ['show-inheritance', 'special-members'],
    })
    return c
11,936
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxconfig/stackconf.py#L220-L254
[ "def", "get_variation_from_id", "(", "self", ",", "experiment_key", ",", "variation_id", ")", ":", "variation_map", "=", "self", ".", "variation_id_map", ".", "get", "(", "experiment_key", ")", "if", "variation_map", ":", "variation", "=", "variation_map", ".", "get", "(", "variation_id", ")", "if", "variation", ":", "return", "variation", "else", ":", "self", ".", "logger", ".", "error", "(", "'Variation ID \"%s\" is not in datafile.'", "%", "variation_id", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidVariationException", "(", "enums", ".", "Errors", ".", "INVALID_VARIATION_ERROR", ")", ")", "return", "None", "self", ".", "logger", ".", "error", "(", "'Experiment key \"%s\" is not in datafile.'", "%", "experiment_key", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidExperimentException", "(", "enums", ".", "Errors", ".", "INVALID_EXPERIMENT_KEY_ERROR", ")", ")", "return", "None" ]
Add configurations related to matplotlib s plot directive to the state .
def _insert_matplotlib_configs(c):
    """Enable matplotlib's plot directive in *c* when importable, and
    return *c*. Warns (instead of failing) when it cannot be imported.
    """
    c.setdefault('extensions', [])
    try:
        import matplotlib.sphinxext.plot_directive
    except (ImportError, AttributeError):
        # AttributeError covers matplotlib being installed without Sphinx;
        # this module is imported by the config generator even when not
        # building docs.
        warnings.warn(
            "matplotlib's plot_directive could not be imported. "
            "Inline plots will not be included in the output.")
    else:
        c['extensions'].append(matplotlib.sphinxext.plot_directive.__name__)
    return c
11,937
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxconfig/stackconf.py#L257-L274
[ "def", "cudnnSetPooling2dDescriptor", "(", "poolingDesc", ",", "mode", ",", "windowHeight", ",", "windowWidth", ",", "verticalPadding", ",", "horizontalPadding", ",", "verticalStride", ",", "horizontalStride", ")", ":", "status", "=", "_libcudnn", ".", "cudnnSetPooling2dDescriptor", "(", "poolingDesc", ",", "mode", ",", "windowHeight", ",", "windowWidth", ",", "verticalPadding", ",", "horizontalPadding", ",", "verticalStride", ",", "horizontalStride", ")", "cudnnCheckStatus", "(", "status", ")" ]
Insert version information into the configuration namespace .
def _insert_single_package_eups_version(c, eups_version):
    """Insert version information for a single-package build into *c* and
    return it.

    The package's EUPS version string becomes both ``version`` and
    ``release``; all git/EUPS refs default to the latest (current/master).
    """
    c.update({
        'release_eups_tag': 'current',
        'release_git_ref': 'master',
        'version': eups_version,
        'release': eups_version,
        'scipipe_conda_ref': 'master',
        'pipelines_demo_ref': 'master',
        'newinstall_ref': 'master',
    })
    return c
11,938
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxconfig/stackconf.py#L298-L333
[ "def", "rmp_rng", "(", "aryVls", ",", "varNewMin", ",", "varNewMax", ",", "varOldThrMin", "=", "None", ",", "varOldAbsMax", "=", "None", ")", ":", "if", "varOldThrMin", "is", "None", ":", "varOldMin", "=", "aryVls", ".", "min", "(", ")", "else", ":", "varOldMin", "=", "varOldThrMin", "if", "varOldAbsMax", "is", "None", ":", "varOldMax", "=", "aryVls", ".", "max", "(", ")", "else", ":", "varOldMax", "=", "varOldAbsMax", "aryNewVls", "=", "np", ".", "empty", "(", "(", "aryVls", ".", "shape", ")", ",", "dtype", "=", "aryVls", ".", "dtype", ")", "for", "ind", ",", "val", "in", "enumerate", "(", "aryVls", ")", ":", "aryNewVls", "[", "ind", "]", "=", "(", "(", "(", "val", "-", "varOldMin", ")", "*", "(", "varNewMax", "-", "varNewMin", ")", ")", "/", "(", "varOldMax", "-", "varOldMin", ")", ")", "+", "varNewMin", "return", "aryNewVls" ]
Insert information about the current EUPS tag into the configuration namespace .
def _insert_eups_version(c):
    """Insert version info derived from the ``EUPS_TAG`` environment
    variable (default ``d_latest``) into *c* and return it.
    """
    eups_tag = os.getenv('EUPS_TAG')
    if eups_tag is None:
        eups_tag = 'd_latest'
    # Guess the git ref that corresponds to this EUPS tag.
    if eups_tag in ('d_latest', 'w_latest', 'current') or eups_tag.startswith('d_'):
        # Latest/daily tags are not tagged on git; track master.
        git_ref = 'master'
    elif eups_tag.startswith('v'):
        # Major version or release candidate tag, e.g. v19_0_0 -> 19.0.0.
        git_ref = eups_tag.lstrip('v').replace('_', '.')
    elif eups_tag.startswith('w_'):
        # Regular weekly tag, e.g. w_2021_33 -> w.2021.33.
        git_ref = eups_tag.replace('_', '.')
    else:
        # Ideally shouldn't get to this point.
        git_ref = 'master'
    # Set the variables for the Jinja context.
    c.update({
        'release_eups_tag': eups_tag,
        'release_git_ref': git_ref,
        'version': eups_tag,
        'release': eups_tag,
        'scipipe_conda_ref': git_ref,
        'pipelines_demo_ref': git_ref,
        'newinstall_ref': git_ref,
    })
    return c
11,939
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxconfig/stackconf.py#L336-L386
[ "def", "shuffle_columns", "(", "a", ")", ":", "mask", "=", "range", "(", "a", ".", "text_size", ")", "random", ".", "shuffle", "(", "mask", ")", "for", "c", "in", "a", ".", "components", ":", "c", ".", "text", "=", "''", ".", "join", "(", "[", "c", ".", "text", "[", "i", "]", "for", "i", "in", "mask", "]", ")" ]
Build a dict of Sphinx configurations that populate the conf . py of the main pipelines_lsst_io Sphinx project for LSST Science Pipelines documentation .
def build_pipelines_lsst_io_configs(*, project_name, copyright=None):
    """Build the dict of Sphinx configurations that populate the conf.py
    of the main pipelines_lsst_io Sphinx project for LSST Science
    Pipelines documentation.

    NOTE(review): the *copyright* parameter is currently unused; a fixed
    copyright string is set below instead.
    """
    # Work around Sphinx bug related to large and highly-nested source files
    sys.setrecursionlimit(2000)
    c = {}
    c = _insert_common_sphinx_configs(c, project_name=project_name)
    # HTML theme
    c = _insert_html_configs(c, project_name=project_name,
                             short_project_name=project_name)
    # Sphinx extension modules
    c = _insert_extensions(c)
    # Intersphinx configuration
    c = _insert_intersphinx_mapping(c)
    # Breathe extension configuration
    # FIXME configure this for multiple sites
    # Automodapi and numpydoc configurations
    c = _insert_automodapi_configs(c)
    # Matplotlib configurations
    c = _insert_matplotlib_configs(c)
    # Graphviz configurations
    c = _insert_graphviz_configs(c)
    # Add versioning information
    c = _insert_eups_version(c)
    # Always use "now" as the date for the main site's docs because we can't
    # look at the Git history of each stack package.
    date = datetime.datetime.now()
    c['today'] = date.strftime('%Y-%m-%d')
    # Use this copyright for now. Ultimately we want to gather COPYRIGHT files
    # and build an integrated copyright that way.
    c['copyright'] = '2015-{year} LSST contributors'.format(year=date.year)
    # Hide todo directives in the "published" documentation on the main site.
    c['todo_include_todos'] = False
    # List of patterns, relative to source directory, that match files and
    # directories to ignore when looking for source files.
    c['exclude_patterns'] = [
        'README.rst',
        # Build products
        '_build',
        # Source for release notes (contents are included in built pages)
        'releases/note-source/*.rst',
        'releases/tickets-source/*.rst',
        # EUPS configuration directory
        'ups',
        # Recommended directory for pip installing doc eng Python packages
        '.pyvenv',
        # GitHub templates
        '.github',
        # This 'home' directory is created by the docubase image for the
        # sqre/infra/documenteer ci.lsst.codes Jenkins job. Ideally this
        # shouldn't be in the directory at all, but we certainly need to
        # ignore it while its here.
        'home',
    ]
    # Insert rst_epilog configuration
    c = _insert_rst_epilog(c)
    # Set up the context for the sphinx-jinja extension
    c = _insert_jinja_configuration(c)
    return c
11,940
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxconfig/stackconf.py#L533-L647
[ "def", "sort2groups", "(", "array", ",", "gpat", "=", "[", "'_R1'", ",", "'_R2'", "]", ")", ":", "groups", "=", "[", "REGroup", "(", "gp", ")", "for", "gp", "in", "gpat", "]", "unmatched", "=", "[", "]", "for", "item", "in", "array", ":", "matched", "=", "False", "for", "m", "in", "groups", ":", "if", "m", ".", "match", "(", "item", ")", ":", "matched", "=", "True", "break", "if", "not", "matched", ":", "unmatched", ".", "append", "(", "item", ")", "return", "[", "sorted", "(", "m", ".", "list", ")", "for", "m", "in", "groups", "]", ",", "sorted", "(", "unmatched", ")" ]
Setup the plugin .
def setup(self, app):
    """Initialise the plugin, coercing numeric settings to integers.

    Settings may arrive as strings (e.g. from environment or config
    files), so port, db, and poolsize are normalised here.
    """
    super().setup(app)
    for option in ('port', 'db', 'poolsize'):
        setattr(self.cfg, option, int(getattr(self.cfg, option)))
11,941
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L49-L54
[ "def", "verify", "(", "self", ")", ":", "c", "=", "self", ".", "database", ".", "cursor", "(", ")", "non_exist", "=", "set", "(", ")", "no_db_entry", "=", "set", "(", "os", ".", "listdir", "(", "self", ".", "cache_dir", ")", ")", "try", ":", "no_db_entry", ".", "remove", "(", "'file_database.db'", ")", "no_db_entry", ".", "remove", "(", "'file_database.db-journal'", ")", "except", ":", "pass", "for", "row", "in", "c", ".", "execute", "(", "\"SELECT path FROM files\"", ")", ":", "path", "=", "row", "[", "0", "]", "repo_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "cache_dir", ",", "path", ")", "if", "os", ".", "path", ".", "exists", "(", "repo_path", ")", ":", "no_db_entry", ".", "remove", "(", "path", ")", "else", ":", "non_exist", ".", "add", "(", "path", ")", "if", "len", "(", "non_exist", ")", ">", "0", ":", "raise", "Exception", "(", "\"Found {} records in db for files that don't exist: {}\"", ".", "format", "(", "len", "(", "non_exist", ")", ",", "','", ".", "join", "(", "non_exist", ")", ")", ")", "if", "len", "(", "no_db_entry", ")", ">", "0", ":", "raise", "Exception", "(", "\"Found {} files that don't have db entries: {}\"", ".", "format", "(", "len", "(", "no_db_entry", ")", ",", "','", ".", "join", "(", "no_db_entry", ")", ")", ")" ]
Connect to Redis .
async def startup ( self , app ) : if self . cfg . fake : if not FakeConnection : raise PluginException ( 'Install fakeredis for fake connections.' ) self . conn = await FakeConnection . create ( ) if self . cfg . pubsub : self . pubsub_conn = self . conn else : try : if self . cfg . poolsize <= 1 : self . conn = await asyncio . wait_for ( asyncio_redis . Connection . create ( host = self . cfg . host , port = self . cfg . port , password = self . cfg . password , db = self . cfg . db , ) , self . cfg . timeout ) else : self . conn = await asyncio . wait_for ( asyncio_redis . Pool . create ( host = self . cfg . host , port = self . cfg . port , password = self . cfg . password , db = self . cfg . db , poolsize = self . cfg . poolsize , ) , self . cfg . timeout ) if self . cfg . pubsub : self . pubsub_conn = await asyncio . wait_for ( asyncio_redis . Connection . create ( host = self . cfg . host , port = self . cfg . port , password = self . cfg . password , db = self . cfg . db , ) , self . cfg . timeout ) except asyncio . TimeoutError : raise PluginException ( 'Muffin-redis connection timeout.' ) if self . cfg . pubsub : self . pubsub_subscription = await self . pubsub_conn . start_subscribe ( ) self . pubsub_reader = ensure_future ( self . _pubsub_reader_proc ( ) , loop = self . app . loop )
11,942
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L56-L92
[ "def", "accel_calibration", "(", "self", ")", ":", "self", ".", "calibration_state", "=", "self", ".", "CAL_ACC", "self", ".", "acc_dialog", "=", "SK8AccDialog", "(", "self", ".", "sk8", ".", "get_imu", "(", "self", ".", "spinIMU", ".", "value", "(", ")", ")", ",", "self", ")", "if", "self", ".", "acc_dialog", ".", "exec_", "(", ")", "==", "QDialog", ".", "Rejected", ":", "return", "self", ".", "calculate_acc_calibration", "(", "self", ".", "acc_dialog", ".", "samples", ")" ]
Close self connections .
async def cleanup ( self , app ) : self . conn . close ( ) if self . pubsub_conn : self . pubsub_reader . cancel ( ) self . pubsub_conn . close ( ) # give connections a chance to actually terminate # TODO: use better method once it will be added, # see https://github.com/jonathanslenders/asyncio-redis/issues/56 await asyncio . sleep ( 0 )
11,943
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L94-L103
[ "def", "download_storyitem", "(", "self", ",", "item", ":", "StoryItem", ",", "target", ":", "str", ")", "->", "bool", ":", "date_local", "=", "item", ".", "date_local", "dirname", "=", "_PostPathFormatter", "(", "item", ")", ".", "format", "(", "self", ".", "dirname_pattern", ",", "target", "=", "target", ")", "filename", "=", "dirname", "+", "'/'", "+", "self", ".", "format_filename", "(", "item", ",", "target", "=", "target", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "filename", ")", ",", "exist_ok", "=", "True", ")", "downloaded", "=", "False", "if", "not", "item", ".", "is_video", "or", "self", ".", "download_video_thumbnails", "is", "True", ":", "url", "=", "item", ".", "url", "downloaded", "=", "self", ".", "download_pic", "(", "filename", "=", "filename", ",", "url", "=", "url", ",", "mtime", "=", "date_local", ")", "if", "item", ".", "is_video", "and", "self", ".", "download_videos", "is", "True", ":", "downloaded", "|=", "self", ".", "download_pic", "(", "filename", "=", "filename", ",", "url", "=", "item", ".", "video_url", ",", "mtime", "=", "date_local", ")", "# Save caption if desired", "metadata_string", "=", "_ArbitraryItemFormatter", "(", "item", ")", ".", "format", "(", "self", ".", "storyitem_metadata_txt_pattern", ")", ".", "strip", "(", ")", "if", "metadata_string", ":", "self", ".", "save_caption", "(", "filename", "=", "filename", ",", "mtime", "=", "item", ".", "date_local", ",", "caption", "=", "metadata_string", ")", "# Save metadata as JSON if desired.", "if", "self", ".", "save_metadata", "is", "not", "False", ":", "self", ".", "save_metadata_json", "(", "filename", ",", "item", ")", "self", ".", "context", ".", "log", "(", ")", "return", "downloaded" ]
Store the given value into Redis .
def set ( self , key , value , * args , * * kwargs ) : if self . cfg . jsonpickle : value = jsonpickle . encode ( value ) return self . conn . set ( key , value , * args , * * kwargs )
11,944
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L105-L112
[ "def", "parse_from_file", "(", "filename", ",", "nodata", "=", "False", ")", ":", "header", "=", "None", "with", "open", "(", "filename", ",", "\"rb\"", ")", "as", "file", ":", "header", "=", "read_machine_header", "(", "file", ")", "meta_raw", "=", "file", ".", "read", "(", "header", "[", "'meta_len'", "]", ")", "meta", "=", "__parse_meta", "(", "meta_raw", ",", "header", ")", "data", "=", "b''", "if", "not", "nodata", ":", "data", "=", "__decompress", "(", "meta", ",", "file", ".", "read", "(", "header", "[", "'data_len'", "]", ")", ")", "return", "header", ",", "meta", ",", "data" ]
Decode the value .
async def get ( self , key ) : value = await self . conn . get ( key ) if self . cfg . jsonpickle : if isinstance ( value , bytes ) : return jsonpickle . decode ( value . decode ( 'utf-8' ) ) if isinstance ( value , str ) : return jsonpickle . decode ( value ) return value
11,945
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L114-L124
[ "def", "_weave_layers_graft", "(", "*", ",", "pdf_base", ",", "page_num", ",", "text", ",", "font", ",", "font_key", ",", "procset", ",", "rotation", ",", "strip_old_text", ",", "log", ")", ":", "log", ".", "debug", "(", "\"Grafting\"", ")", "if", "Path", "(", "text", ")", ".", "stat", "(", ")", ".", "st_size", "==", "0", ":", "return", "# This is a pointer indicating a specific page in the base file", "pdf_text", "=", "pikepdf", ".", "open", "(", "text", ")", "pdf_text_contents", "=", "pdf_text", ".", "pages", "[", "0", "]", ".", "Contents", ".", "read_bytes", "(", ")", "if", "not", "tesseract", ".", "has_textonly_pdf", "(", ")", ":", "# If we don't have textonly_pdf, edit the stream to delete the", "# instruction to draw the image Tesseract generated, which we do not", "# use.", "stream", "=", "bytearray", "(", "pdf_text_contents", ")", "pattern", "=", "b'/Im1 Do'", "idx", "=", "stream", ".", "find", "(", "pattern", ")", "stream", "[", "idx", ":", "(", "idx", "+", "len", "(", "pattern", ")", ")", "]", "=", "b' '", "*", "len", "(", "pattern", ")", "pdf_text_contents", "=", "bytes", "(", "stream", ")", "base_page", "=", "pdf_base", ".", "pages", ".", "p", "(", "page_num", ")", "# The text page always will be oriented up by this stage but the original", "# content may have a rotation applied. 
Wrap the text stream with a rotation", "# so it will be oriented the same way as the rest of the page content.", "# (Previous versions OCRmyPDF rotated the content layer to match the text.)", "mediabox", "=", "[", "float", "(", "pdf_text", ".", "pages", "[", "0", "]", ".", "MediaBox", "[", "v", "]", ")", "for", "v", "in", "range", "(", "4", ")", "]", "wt", ",", "ht", "=", "mediabox", "[", "2", "]", "-", "mediabox", "[", "0", "]", ",", "mediabox", "[", "3", "]", "-", "mediabox", "[", "1", "]", "mediabox", "=", "[", "float", "(", "base_page", ".", "MediaBox", "[", "v", "]", ")", "for", "v", "in", "range", "(", "4", ")", "]", "wp", ",", "hp", "=", "mediabox", "[", "2", "]", "-", "mediabox", "[", "0", "]", ",", "mediabox", "[", "3", "]", "-", "mediabox", "[", "1", "]", "translate", "=", "pikepdf", ".", "PdfMatrix", "(", ")", ".", "translated", "(", "-", "wt", "/", "2", ",", "-", "ht", "/", "2", ")", "untranslate", "=", "pikepdf", ".", "PdfMatrix", "(", ")", ".", "translated", "(", "wp", "/", "2", ",", "hp", "/", "2", ")", "# -rotation because the input is a clockwise angle and this formula", "# uses CCW", "rotation", "=", "-", "rotation", "%", "360", "rotate", "=", "pikepdf", ".", "PdfMatrix", "(", ")", ".", "rotated", "(", "rotation", ")", "# Because of rounding of DPI, we might get a text layer that is not", "# identically sized to the target page. Scale to adjust. 
Normally this", "# is within 0.998.", "if", "rotation", "in", "(", "90", ",", "270", ")", ":", "wt", ",", "ht", "=", "ht", ",", "wt", "scale_x", "=", "wp", "/", "wt", "scale_y", "=", "hp", "/", "ht", "log", ".", "debug", "(", "'%r'", ",", "(", "scale_x", ",", "scale_y", ")", ")", "scale", "=", "pikepdf", ".", "PdfMatrix", "(", ")", ".", "scaled", "(", "scale_x", ",", "scale_y", ")", "# Translate the text so it is centered at (0, 0), rotate it there, adjust", "# for a size different between initial and text PDF, then untranslate", "ctm", "=", "translate", "@", "rotate", "@", "scale", "@", "untranslate", "pdf_text_contents", "=", "b'q %s cm\\n'", "%", "ctm", ".", "encode", "(", ")", "+", "pdf_text_contents", "+", "b'\\nQ\\n'", "new_text_layer", "=", "pikepdf", ".", "Stream", "(", "pdf_base", ",", "pdf_text_contents", ")", "if", "strip_old_text", ":", "strip_invisible_text", "(", "pdf_base", ",", "base_page", ",", "log", ")", "base_page", ".", "page_contents_add", "(", "new_text_layer", ",", "prepend", "=", "True", ")", "_update_page_resources", "(", "page", "=", "base_page", ",", "font", "=", "font", ",", "font_key", "=", "font_key", ",", "procset", "=", "procset", ")", "pdf_text", ".", "close", "(", ")" ]
Publish message to channel .
def publish ( self , channel , message ) : if self . cfg . jsonpickle : message = jsonpickle . encode ( message ) return self . conn . publish ( channel , message )
11,946
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L126-L133
[ "def", "mol_supplier", "(", "lines", ",", "no_halt", ",", "assign_descriptors", ")", ":", "def", "sdf_block", "(", "lns", ")", ":", "mol", "=", "[", "]", "opt", "=", "[", "]", "is_mol", "=", "True", "for", "line", "in", "lns", ":", "if", "line", ".", "startswith", "(", "\"$$$$\"", ")", ":", "yield", "mol", "[", ":", "]", ",", "opt", "[", ":", "]", "is_mol", "=", "True", "mol", ".", "clear", "(", ")", "opt", ".", "clear", "(", ")", "elif", "line", ".", "startswith", "(", "\"M END\"", ")", ":", "is_mol", "=", "False", "elif", "is_mol", ":", "mol", ".", "append", "(", "line", ".", "rstrip", "(", ")", ")", "else", ":", "opt", ".", "append", "(", "line", ".", "rstrip", "(", ")", ")", "if", "mol", ":", "yield", "mol", ",", "opt", "for", "i", ",", "(", "mol", ",", "opt", ")", "in", "enumerate", "(", "sdf_block", "(", "lines", ")", ")", ":", "try", ":", "c", "=", "molecule", "(", "mol", ")", "if", "assign_descriptors", ":", "molutil", ".", "assign_descriptors", "(", "c", ")", "except", "ValueError", "as", "err", ":", "if", "no_halt", ":", "print", "(", "\"Unsupported symbol: {} (#{} in v2000reader)\"", ".", "format", "(", "err", ",", "i", "+", "1", ")", ")", "c", "=", "molutil", ".", "null_molecule", "(", "assign_descriptors", ")", "else", ":", "raise", "ValueError", "(", "\"Unsupported symbol: {}\"", ".", "format", "(", "err", ")", ")", "except", "RuntimeError", "as", "err", ":", "if", "no_halt", ":", "print", "(", "\"Failed to minimize ring: {} (#{} in v2000reader)\"", ".", "format", "(", "err", ",", "i", "+", "1", ")", ")", "else", ":", "raise", "RuntimeError", "(", "\"Failed to minimize ring: {}\"", ".", "format", "(", "err", ")", ")", "except", ":", "if", "no_halt", ":", "print", "(", "\"Unexpected error (#{} in v2000reader)\"", ".", "format", "(", "i", "+", "1", ")", ")", "c", "=", "molutil", ".", "null_molecule", "(", "assign_descriptors", ")", "c", ".", "data", "=", "optional_data", "(", "opt", ")", "yield", "c", "continue", "else", ":", "print", "(", 
"traceback", ".", "format_exc", "(", ")", ")", "raise", "Exception", "(", "\"Unsupported Error\"", ")", "c", ".", "data", "=", "optional_data", "(", "opt", ")", "yield", "c" ]
Create a new Subscription context manager .
def start_subscribe ( self ) : if not self . conn : raise ValueError ( 'Not connected' ) elif not self . pubsub_conn : raise ValueError ( 'PubSub not enabled' ) # creates a new context manager return Subscription ( self )
11,947
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L135-L143
[ "def", "write", "(", "filename", ",", "data", ",", "extname", "=", "None", ",", "extver", "=", "None", ",", "units", "=", "None", ",", "compress", "=", "None", ",", "table_type", "=", "'binary'", ",", "header", "=", "None", ",", "clobber", "=", "False", ",", "*", "*", "keys", ")", ":", "with", "FITS", "(", "filename", ",", "'rw'", ",", "clobber", "=", "clobber", ",", "*", "*", "keys", ")", "as", "fits", ":", "fits", ".", "write", "(", "data", ",", "table_type", "=", "table_type", ",", "units", "=", "units", ",", "extname", "=", "extname", ",", "extver", "=", "extver", ",", "compress", "=", "compress", ",", "header", "=", "header", ",", "*", "*", "keys", ")" ]
Subscribe to given channel .
async def _subscribe ( self , channels , is_mask ) : news = [ ] for channel in channels : key = channel , is_mask self . _channels . append ( key ) if key in self . _plugin . _subscriptions : self . _plugin . _subscriptions [ key ] . append ( self . _queue ) else : self . _plugin . _subscriptions [ key ] = [ self . _queue ] news . append ( channel ) if news : await getattr ( self . _sub , 'psubscribe' if is_mask else 'subscribe' ) ( news )
11,948
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L229-L241
[ "def", "write_workbook", "(", "filename", ",", "table_list", ",", "column_width", "=", "None", ")", ":", "# Modify default header format", "# Pandas' default header format is bold text with thin borders. Here we", "# use bold text only, without borders.", "# The header style structure is in pd.core.format in pandas<=0.18.0,", "# pd.formats.format in 0.18.1<=pandas<0.20, and pd.io.formats.excel in", "# pandas>=0.20.", "# Also, wrap in a try-except block in case style structure is not found.", "format_module_found", "=", "False", "try", ":", "# Get format module", "if", "packaging", ".", "version", ".", "parse", "(", "pd", ".", "__version__", ")", "<=", "packaging", ".", "version", ".", "parse", "(", "'0.18'", ")", ":", "format_module", "=", "pd", ".", "core", ".", "format", "elif", "packaging", ".", "version", ".", "parse", "(", "pd", ".", "__version__", ")", "<", "packaging", ".", "version", ".", "parse", "(", "'0.20'", ")", ":", "format_module", "=", "pd", ".", "formats", ".", "format", "else", ":", "import", "pandas", ".", "io", ".", "formats", ".", "excel", "as", "format_module", "# Save previous style, replace, and indicate that previous style should", "# be restored at the end", "old_header_style", "=", "format_module", ".", "header_style", "format_module", ".", "header_style", "=", "{", "\"font\"", ":", "{", "\"bold\"", ":", "True", "}", "}", "format_module_found", "=", "True", "except", "AttributeError", "as", "e", ":", "pass", "# Generate output writer object", "writer", "=", "pd", ".", "ExcelWriter", "(", "filename", ",", "engine", "=", "'xlsxwriter'", ")", "# Write tables", "for", "sheet_name", ",", "df", "in", "table_list", ":", "# Convert index names to regular columns", "df", "=", "df", ".", "reset_index", "(", ")", "# Write to an Excel sheet", "df", ".", "to_excel", "(", "writer", ",", "sheet_name", "=", "sheet_name", ",", "index", "=", "False", ")", "# Set column width", "if", "column_width", "is", "None", ":", "for", "i", ",", "(", "col_name", 
",", "column", ")", "in", "enumerate", "(", "six", ".", "iteritems", "(", "df", ")", ")", ":", "# Get the maximum number of characters in a column", "max_chars_col", "=", "column", ".", "astype", "(", "str", ")", ".", "str", ".", "len", "(", ")", ".", "max", "(", ")", "max_chars_col", "=", "max", "(", "len", "(", "col_name", ")", ",", "max_chars_col", ")", "# Write width", "writer", ".", "sheets", "[", "sheet_name", "]", ".", "set_column", "(", "i", ",", "i", ",", "width", "=", "1.", "*", "max_chars_col", ")", "else", ":", "writer", ".", "sheets", "[", "sheet_name", "]", ".", "set_column", "(", "0", ",", "len", "(", "df", ".", "columns", ")", "-", "1", ",", "width", "=", "column_width", ")", "# Write excel file", "writer", ".", "save", "(", ")", "# Restore previous header format", "if", "format_module_found", ":", "format_module", ".", "header_style", "=", "old_header_style" ]
Unsubscribe from given channel .
async def _unsubscribe ( self , channels , is_mask ) : vanished = [ ] if channels : for channel in channels : key = channel , is_mask self . _channels . remove ( key ) self . _plugin . _subscriptions [ key ] . remove ( self . _queue ) if not self . _plugin . _subscriptions [ key ] : # we were last sub? vanished . append ( channel ) del self . _plugin . _subscriptions [ key ] else : while self . _channels : channel , is_mask = key = self . _channels . pop ( ) self . _plugin . _subscriptions [ key ] . remove ( self . _queue ) if not self . _plugin . _subscriptions [ key ] : vanished . append ( channel ) del self . _plugin . _subscriptions [ key ] if vanished : await getattr ( self . _sub , 'punsubscribe' if is_mask else 'unsubscribe' ) ( vanished )
11,949
https://github.com/klen/muffin-redis/blob/b0cb8c1ba1511d501c2084def156710e75aaf781/muffin_redis.py#L243-L262
[ "def", "write_workbook", "(", "filename", ",", "table_list", ",", "column_width", "=", "None", ")", ":", "# Modify default header format", "# Pandas' default header format is bold text with thin borders. Here we", "# use bold text only, without borders.", "# The header style structure is in pd.core.format in pandas<=0.18.0,", "# pd.formats.format in 0.18.1<=pandas<0.20, and pd.io.formats.excel in", "# pandas>=0.20.", "# Also, wrap in a try-except block in case style structure is not found.", "format_module_found", "=", "False", "try", ":", "# Get format module", "if", "packaging", ".", "version", ".", "parse", "(", "pd", ".", "__version__", ")", "<=", "packaging", ".", "version", ".", "parse", "(", "'0.18'", ")", ":", "format_module", "=", "pd", ".", "core", ".", "format", "elif", "packaging", ".", "version", ".", "parse", "(", "pd", ".", "__version__", ")", "<", "packaging", ".", "version", ".", "parse", "(", "'0.20'", ")", ":", "format_module", "=", "pd", ".", "formats", ".", "format", "else", ":", "import", "pandas", ".", "io", ".", "formats", ".", "excel", "as", "format_module", "# Save previous style, replace, and indicate that previous style should", "# be restored at the end", "old_header_style", "=", "format_module", ".", "header_style", "format_module", ".", "header_style", "=", "{", "\"font\"", ":", "{", "\"bold\"", ":", "True", "}", "}", "format_module_found", "=", "True", "except", "AttributeError", "as", "e", ":", "pass", "# Generate output writer object", "writer", "=", "pd", ".", "ExcelWriter", "(", "filename", ",", "engine", "=", "'xlsxwriter'", ")", "# Write tables", "for", "sheet_name", ",", "df", "in", "table_list", ":", "# Convert index names to regular columns", "df", "=", "df", ".", "reset_index", "(", ")", "# Write to an Excel sheet", "df", ".", "to_excel", "(", "writer", ",", "sheet_name", "=", "sheet_name", ",", "index", "=", "False", ")", "# Set column width", "if", "column_width", "is", "None", ":", "for", "i", ",", "(", "col_name", 
",", "column", ")", "in", "enumerate", "(", "six", ".", "iteritems", "(", "df", ")", ")", ":", "# Get the maximum number of characters in a column", "max_chars_col", "=", "column", ".", "astype", "(", "str", ")", ".", "str", ".", "len", "(", ")", ".", "max", "(", ")", "max_chars_col", "=", "max", "(", "len", "(", "col_name", ")", ",", "max_chars_col", ")", "# Write width", "writer", ".", "sheets", "[", "sheet_name", "]", ".", "set_column", "(", "i", ",", "i", ",", "width", "=", "1.", "*", "max_chars_col", ")", "else", ":", "writer", ".", "sheets", "[", "sheet_name", "]", ".", "set_column", "(", "0", ",", "len", "(", "df", ".", "columns", ")", "-", "1", ",", "width", "=", "column_width", ")", "# Write excel file", "writer", ".", "save", "(", ")", "# Restore previous header format", "if", "format_module_found", ":", "format_module", ".", "header_style", "=", "old_header_style" ]
Bitwise xor on equal length bytearrays .
def xor ( a , b ) : return bytearray ( i ^ j for i , j in zip ( a , b ) )
11,950
https://github.com/sfstpala/pcr/blob/313ec17585565a0b9740f7b3f47d7a93bf37a7fc/pcr/aes.py#L110-L112
[ "def", "_retrieve_offsets", "(", "self", ",", "timestamps", ",", "timeout_ms", "=", "float", "(", "\"inf\"", ")", ")", ":", "if", "not", "timestamps", ":", "return", "{", "}", "start_time", "=", "time", ".", "time", "(", ")", "remaining_ms", "=", "timeout_ms", "while", "remaining_ms", ">", "0", ":", "future", "=", "self", ".", "_send_offset_requests", "(", "timestamps", ")", "self", ".", "_client", ".", "poll", "(", "future", "=", "future", ",", "timeout_ms", "=", "remaining_ms", ")", "if", "future", ".", "succeeded", "(", ")", ":", "return", "future", ".", "value", "if", "not", "future", ".", "retriable", "(", ")", ":", "raise", "future", ".", "exception", "# pylint: disable-msg=raising-bad-type", "elapsed_ms", "=", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", "*", "1000", "remaining_ms", "=", "timeout_ms", "-", "elapsed_ms", "if", "remaining_ms", "<", "0", ":", "break", "if", "future", ".", "exception", ".", "invalid_metadata", ":", "refresh_future", "=", "self", ".", "_client", ".", "cluster", ".", "request_update", "(", ")", "self", ".", "_client", ".", "poll", "(", "future", "=", "refresh_future", ",", "timeout_ms", "=", "remaining_ms", ")", "else", ":", "time", ".", "sleep", "(", "self", ".", "config", "[", "'retry_backoff_ms'", "]", "/", "1000.0", ")", "elapsed_ms", "=", "(", "time", ".", "time", "(", ")", "-", "start_time", ")", "*", "1000", "remaining_ms", "=", "timeout_ms", "-", "elapsed_ms", "raise", "Errors", ".", "KafkaTimeoutError", "(", "\"Failed to get offsets by timestamps in %s ms\"", "%", "(", "timeout_ms", ",", ")", ")" ]
Set a calculated value for this Expression . Used when writing formulas using XlsxWriter to give cells an initial value when the sheet is loaded without being calculated .
def value ( self ) : try : if isinstance ( self . __value , Expression ) : return self . __value . value return self . __value except AttributeError : return 0
11,951
https://github.com/fkarb/xltable/blob/7a592642d27ad5ee90d2aa8c26338abaa9d84bea/xltable/expression.py#L59-L69
[ "def", "protein_statistics", "(", "self", ")", ":", "# TODO: can i use get_dict here instead", "d", "=", "{", "}", "d", "[", "'id'", "]", "=", "self", ".", "id", "d", "[", "'sequences'", "]", "=", "[", "x", ".", "id", "for", "x", "in", "self", ".", "sequences", "]", "d", "[", "'num_sequences'", "]", "=", "self", ".", "num_sequences", "if", "self", ".", "representative_sequence", ":", "d", "[", "'representative_sequence'", "]", "=", "self", ".", "representative_sequence", ".", "id", "d", "[", "'repseq_gene_name'", "]", "=", "self", ".", "representative_sequence", ".", "gene_name", "d", "[", "'repseq_uniprot'", "]", "=", "self", ".", "representative_sequence", ".", "uniprot", "d", "[", "'repseq_description'", "]", "=", "self", ".", "representative_sequence", ".", "description", "d", "[", "'num_structures'", "]", "=", "self", ".", "num_structures", "d", "[", "'experimental_structures'", "]", "=", "[", "x", ".", "id", "for", "x", "in", "self", ".", "get_experimental_structures", "(", ")", "]", "d", "[", "'num_experimental_structures'", "]", "=", "self", ".", "num_structures_experimental", "d", "[", "'homology_models'", "]", "=", "[", "x", ".", "id", "for", "x", "in", "self", ".", "get_homology_models", "(", ")", "]", "d", "[", "'num_homology_models'", "]", "=", "self", ".", "num_structures_homology", "if", "self", ".", "representative_structure", ":", "d", "[", "'representative_structure'", "]", "=", "self", ".", "representative_structure", ".", "id", "d", "[", "'representative_chain'", "]", "=", "self", ".", "representative_chain", "d", "[", "'representative_chain_seq_coverage'", "]", "=", "self", ".", "representative_chain_seq_coverage", "d", "[", "'repstruct_description'", "]", "=", "self", ".", "description", "if", "self", ".", "representative_structure", ".", "is_experimental", ":", "d", "[", "'repstruct_resolution'", "]", "=", "self", ".", "representative_structure", ".", "resolution", "d", "[", "'num_sequence_alignments'", "]", "=", "len", "(", "self", ".", 
"sequence_alignments", ")", "d", "[", "'num_structure_alignments'", "]", "=", "len", "(", "self", ".", "structure_alignments", ")", "return", "d" ]
return True if value has been set
def has_value ( self ) : try : if isinstance ( self . __value , Expression ) : return self . __value . has_value return True except AttributeError : return False
11,952
https://github.com/fkarb/xltable/blob/7a592642d27ad5ee90d2aa8c26338abaa9d84bea/xltable/expression.py#L72-L79
[ "def", "ekpsel", "(", "query", ",", "msglen", ",", "tablen", ",", "collen", ")", ":", "query", "=", "stypes", ".", "stringToCharP", "(", "query", ")", "msglen", "=", "ctypes", ".", "c_int", "(", "msglen", ")", "tablen", "=", "ctypes", ".", "c_int", "(", "tablen", ")", "collen", "=", "ctypes", ".", "c_int", "(", "collen", ")", "n", "=", "ctypes", ".", "c_int", "(", ")", "xbegs", "=", "stypes", ".", "emptyIntVector", "(", "_SPICE_EK_MAXQSEL", ")", "xends", "=", "stypes", ".", "emptyIntVector", "(", "_SPICE_EK_MAXQSEL", ")", "xtypes", "=", "stypes", ".", "emptyIntVector", "(", "_SPICE_EK_MAXQSEL", ")", "xclass", "=", "stypes", ".", "emptyIntVector", "(", "_SPICE_EK_MAXQSEL", ")", "tabs", "=", "stypes", ".", "emptyCharArray", "(", "yLen", "=", "_SPICE_EK_MAXQSEL", ",", "xLen", "=", "tablen", ")", "cols", "=", "stypes", ".", "emptyCharArray", "(", "yLen", "=", "_SPICE_EK_MAXQSEL", ",", "xLen", "=", "collen", ")", "error", "=", "ctypes", ".", "c_int", "(", ")", "errmsg", "=", "stypes", ".", "stringToCharP", "(", "msglen", ")", "libspice", ".", "ekpsel_c", "(", "query", ",", "msglen", ",", "tablen", ",", "collen", ",", "ctypes", ".", "byref", "(", "n", ")", ",", "xbegs", ",", "xends", ",", "xtypes", ",", "xclass", ",", "ctypes", ".", "byref", "(", "tabs", ")", ",", "ctypes", ".", "byref", "(", "cols", ")", ",", "ctypes", ".", "byref", "(", "error", ")", ",", "errmsg", ")", "return", "(", "n", ".", "value", ",", "stypes", ".", "cVectorToPython", "(", "xbegs", ")", "[", ":", "n", ".", "value", "]", ",", "stypes", ".", "cVectorToPython", "(", "xends", ")", "[", ":", "n", ".", "value", "]", ",", "stypes", ".", "cVectorToPython", "(", "xtypes", ")", "[", ":", "n", ".", "value", "]", ",", "stypes", ".", "cVectorToPython", "(", "xclass", ")", "[", ":", "n", ".", "value", "]", ",", "stypes", ".", "cVectorToPython", "(", "tabs", ")", "[", ":", "n", ".", "value", "]", ",", "stypes", ".", "cVectorToPython", "(", "cols", ")", "[", ":", "n", ".", "value", "]", ",", "error", ".", 
"value", ",", "stypes", ".", "toPythonString", "(", "errmsg", ")", ")" ]
Copy source - > destination
def copy ( source , destination , ignore = None , adapter = None , fatal = True , logger = LOG . debug ) : return _file_op ( source , destination , _copy , adapter , fatal , logger , ignore = ignore )
11,953
https://github.com/zsimic/runez/blob/14363b719a1aae1528859a501a22d075ce0abfcc/src/runez/file.py#L15-L29
[ "def", "_init_libcrypto", "(", ")", ":", "libcrypto", "=", "_load_libcrypto", "(", ")", "try", ":", "libcrypto", ".", "OPENSSL_init_crypto", "(", ")", "except", "AttributeError", ":", "# Support for OpenSSL < 1.1 (OPENSSL_API_COMPAT < 0x10100000L)", "libcrypto", ".", "OPENSSL_no_config", "(", ")", "libcrypto", ".", "OPENSSL_add_all_algorithms_noconf", "(", ")", "libcrypto", ".", "RSA_new", ".", "argtypes", "=", "(", ")", "libcrypto", ".", "RSA_new", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "RSA_size", ".", "argtype", "=", "(", "c_void_p", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "argtypes", "=", "(", "c_char_p", ",", "c_int", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "restype", "=", "c_void_p", "libcrypto", ".", "BIO_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "restype", "=", "c_void_p", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_private_encrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "libcrypto", ".", "RSA_public_decrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "return", "libcrypto" ]
Move source - > destination
def move ( source , destination , adapter = None , fatal = True , logger = LOG . debug ) : return _file_op ( source , destination , _move , adapter , fatal , logger )
11,954
https://github.com/zsimic/runez/blob/14363b719a1aae1528859a501a22d075ce0abfcc/src/runez/file.py#L144-L155
[ "def", "_init_libcrypto", "(", ")", ":", "libcrypto", "=", "_load_libcrypto", "(", ")", "try", ":", "libcrypto", ".", "OPENSSL_init_crypto", "(", ")", "except", "AttributeError", ":", "# Support for OpenSSL < 1.1 (OPENSSL_API_COMPAT < 0x10100000L)", "libcrypto", ".", "OPENSSL_no_config", "(", ")", "libcrypto", ".", "OPENSSL_add_all_algorithms_noconf", "(", ")", "libcrypto", ".", "RSA_new", ".", "argtypes", "=", "(", ")", "libcrypto", ".", "RSA_new", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "RSA_size", ".", "argtype", "=", "(", "c_void_p", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "argtypes", "=", "(", "c_char_p", ",", "c_int", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "restype", "=", "c_void_p", "libcrypto", ".", "BIO_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "restype", "=", "c_void_p", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_private_encrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "libcrypto", ".", "RSA_public_decrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "return", "libcrypto" ]
Symlink source < - destination
def symlink ( source , destination , adapter = None , must_exist = True , fatal = True , logger = LOG . debug ) : return _file_op ( source , destination , _symlink , adapter , fatal , logger , must_exist = must_exist )
11,955
https://github.com/zsimic/runez/blob/14363b719a1aae1528859a501a22d075ce0abfcc/src/runez/file.py#L158-L170
[ "def", "_init_libcrypto", "(", ")", ":", "libcrypto", "=", "_load_libcrypto", "(", ")", "try", ":", "libcrypto", ".", "OPENSSL_init_crypto", "(", ")", "except", "AttributeError", ":", "# Support for OpenSSL < 1.1 (OPENSSL_API_COMPAT < 0x10100000L)", "libcrypto", ".", "OPENSSL_no_config", "(", ")", "libcrypto", ".", "OPENSSL_add_all_algorithms_noconf", "(", ")", "libcrypto", ".", "RSA_new", ".", "argtypes", "=", "(", ")", "libcrypto", ".", "RSA_new", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "RSA_size", ".", "argtype", "=", "(", "c_void_p", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "argtypes", "=", "(", "c_char_p", ",", "c_int", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "restype", "=", "c_void_p", "libcrypto", ".", "BIO_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "restype", "=", "c_void_p", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_private_encrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "libcrypto", ".", "RSA_public_decrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "return", "libcrypto" ]
Marks an attribute as being a part of the data defined by the soap datatype
def soap_attribute ( self , name , value ) : setattr ( self , name , value ) self . _attributes . add ( name )
11,956
https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/types.py#L32-L35
[ "def", "fullscreen", "(", "self", ")", ":", "stream", "=", "self", ".", "_stream", "stream", ".", "write", "(", "self", ".", "alt_screen_enable", ")", "stream", ".", "write", "(", "str", "(", "self", ".", "save_title", "(", "0", ")", ")", ")", "# 0 = both icon, title", "stream", ".", "flush", "(", ")", "try", ":", "yield", "self", "finally", ":", "stream", ".", "write", "(", "self", ".", "alt_screen_disable", ")", "stream", ".", "write", "(", "str", "(", "self", ".", "restore_title", "(", "0", ")", ")", ")", "# 0 = icon & title", "stream", ".", "flush", "(", ")" ]
Create and return a soap service type defined for this instance
def get_soap_object ( self , client ) : def to_soap_attribute ( attr ) : words = attr . split ( '_' ) words = words [ : 1 ] + [ word . capitalize ( ) for word in words [ 1 : ] ] return '' . join ( words ) soap_object = client . factory . create ( self . soap_name ) for attr in self . _attributes : value = getattr ( self , attr ) setattr ( soap_object , to_soap_attribute ( attr ) , value ) return soap_object
11,957
https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/types.py#L37-L49
[ "def", "_preprocess_movie_lens", "(", "ratings_df", ")", ":", "ratings_df", "[", "\"data\"", "]", "=", "1.0", "num_timestamps", "=", "ratings_df", "[", "[", "\"userId\"", ",", "\"timestamp\"", "]", "]", ".", "groupby", "(", "\"userId\"", ")", ".", "nunique", "(", ")", "last_user_timestamp", "=", "ratings_df", "[", "[", "\"userId\"", ",", "\"timestamp\"", "]", "]", ".", "groupby", "(", "\"userId\"", ")", ".", "max", "(", ")", "ratings_df", "[", "\"numberOfTimestamps\"", "]", "=", "ratings_df", "[", "\"userId\"", "]", ".", "apply", "(", "lambda", "x", ":", "num_timestamps", "[", "\"timestamp\"", "]", "[", "x", "]", ")", "ratings_df", "[", "\"lastTimestamp\"", "]", "=", "ratings_df", "[", "\"userId\"", "]", ".", "apply", "(", "lambda", "x", ":", "last_user_timestamp", "[", "\"timestamp\"", "]", "[", "x", "]", ")", "ratings_df", "=", "ratings_df", "[", "ratings_df", "[", "\"numberOfTimestamps\"", "]", ">", "2", "]", "ratings_df", "=", "_create_row_col_indices", "(", "ratings_df", ")", "train_ratings_df", "=", "ratings_df", "[", "ratings_df", "[", "\"timestamp\"", "]", "<", "ratings_df", "[", "\"lastTimestamp\"", "]", "]", "test_ratings_df", "=", "ratings_df", "[", "ratings_df", "[", "\"timestamp\"", "]", "==", "ratings_df", "[", "\"lastTimestamp\"", "]", "]", "return", "ratings_df", ",", "train_ratings_df", ",", "test_ratings_df" ]
Override default get_soap_object behavior to account for child Record types
def get_soap_object ( self , client ) : record_data = super ( ) . get_soap_object ( client ) record_data . records = [ Record ( r ) . get_soap_object ( client ) for r in record_data . records ] return record_data
11,958
https://github.com/jslang/responsys/blob/9b355a444c0c75dff41064502c1e2b76dfd5cb93/responsys/types.py#L158-L162
[ "def", "_create_auth", "(", "team", ",", "timeout", "=", "None", ")", ":", "url", "=", "get_registry_url", "(", "team", ")", "contents", "=", "_load_auth", "(", ")", "auth", "=", "contents", ".", "get", "(", "url", ")", "if", "auth", "is", "not", "None", ":", "# If the access token expires within a minute, update it.", "if", "auth", "[", "'expires_at'", "]", "<", "time", ".", "time", "(", ")", "+", "60", ":", "try", ":", "auth", "=", "_update_auth", "(", "team", ",", "auth", "[", "'refresh_token'", "]", ",", "timeout", ")", "except", "CommandException", "as", "ex", ":", "raise", "CommandException", "(", "\"Failed to update the access token (%s). Run `quilt login%s` again.\"", "%", "(", "ex", ",", "' '", "+", "team", "if", "team", "else", "''", ")", ")", "contents", "[", "url", "]", "=", "auth", "_save_auth", "(", "contents", ")", "return", "auth" ]
Processes messages that have been delivered by a registered client .
def handle_message_registered ( self , msg_data , host ) : response = None if msg_data [ "method" ] == "EVENT" : logger . debug ( "<%s> <euuid:%s> Event message " "received" % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) response = self . event ( msg_data [ "cuuid" ] , host , msg_data [ "euuid" ] , msg_data [ "event_data" ] , msg_data [ "timestamp" ] , msg_data [ "priority" ] ) elif msg_data [ "method" ] == "OK EVENT" : logger . debug ( "<%s> <euuid:%s> Event confirmation message " "received" % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) try : del self . event_uuids [ msg_data [ "euuid" ] ] except KeyError : logger . warning ( "<%s> <euuid:%s> Euuid does not exist in event " "buffer. Key was removed before we could process " "it." % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) elif msg_data [ "method" ] == "OK NOTIFY" : logger . debug ( "<%s> <euuid:%s> Ok notify " "received" % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) try : del self . event_uuids [ msg_data [ "euuid" ] ] except KeyError : logger . warning ( "<%s> <euuid:%s> Euuid does not exist in event " "buffer. Key was removed before we could process " "it." % ( msg_data [ "cuuid" ] , msg_data [ "euuid" ] ) ) return response
11,959
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/server.py#L272-L318
[ "def", "create_fake_array_input", "(", "cls", ",", "dependency", ",", "readable_name", ",", "index", "=", "None", ")", ":", "assert", "dependency", "in", "Cache", ".", "_intermediate_results", "assert", "isinstance", "(", "readable_name", ",", "str", ")", "name", "=", "Cache", ".", "_generate_placeholder", "(", "readable_name", ")", "if", "index", "is", "None", ":", "fake_weld_input", "=", "_FakeArray", "(", "dependency", ",", "name", ")", "else", ":", "assert", "isinstance", "(", "index", ",", "tuple", ")", "fake_weld_input", "=", "_FakeStructMember", "(", "dependency", ",", "index", ",", "name", ")", "return", "fake_weld_input" ]
This function simply returns the server version number as a response to the client .
def autodiscover ( self , message ) : # Check to see if the client's version is the same as our own. if message [ "version" ] in self . allowed_versions : logger . debug ( "<%s> Client version matches server " "version." % message [ "cuuid" ] ) response = serialize_data ( { "method" : "OHAI Client" , "version" : self . version , "server_name" : self . server_name } , self . compression , encryption = False ) else : logger . warning ( "<%s> Client version %s does not match allowed server " "versions %s" % ( message [ "cuuid" ] , message [ "version" ] , self . version ) ) response = serialize_data ( { "method" : "BYE REGISTER" } , self . compression , encryption = False ) return response
11,960
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/server.py#L321-L356
[ "def", "setOverlayTexelAspect", "(", "self", ",", "ulOverlayHandle", ",", "fTexelAspect", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTexelAspect", "result", "=", "fn", "(", "ulOverlayHandle", ",", "fTexelAspect", ")", "return", "result" ]
This function will register a particular client in the server s registry dictionary .
def register ( self , message , host ) : # Get the client generated cuuid from the register message cuuid = message [ "cuuid" ] # Check to see if we've hit the maximum number of registrations # If we've reached the maximum limit, return a failure response to the # client. if len ( self . registry ) > self . registration_limit : logger . warning ( "<%s> Registration limit exceeded" % cuuid ) response = serialize_data ( { "method" : "BYE REGISTER" } , self . compression , encryption = False ) return response # Insert a new record in the database with the client's information data = { "host" : host [ 0 ] , "port" : host [ 1 ] , "time" : datetime . now ( ) } # Prepare an OK REGISTER response to the client to let it know that it # has registered return_msg = { "method" : "OK REGISTER" } # If the register request has a public key included in it, then include # it in the registry. if "encryption" in message and self . encryption : data [ "encryption" ] = PublicKey ( message [ "encryption" ] [ 0 ] , message [ "encryption" ] [ 1 ] ) # Add the host to the encrypted_hosts dictionary so we know to # decrypt messages from this host self . encrypted_hosts [ host ] = cuuid # If the client requested encryption and we have it enabled, send # our public key to the client return_msg [ "encryption" ] = [ self . encryption . n , self . encryption . e ] # Add the entry to the registry if cuuid in self . registry : for key in data : self . registry [ cuuid ] [ key ] = data [ key ] else : self . registry [ cuuid ] = data self . registry [ cuuid ] [ "authenticated" ] = False # Serialize our response to the client response = serialize_data ( return_msg , self . compression , encryption = False ) # For debugging, print all the current rows in the registry logger . debug ( "<%s> Registry entries:" % cuuid ) for ( key , value ) in self . registry . items ( ) : logger . debug ( "<%s> %s %s" % ( str ( cuuid ) , str ( key ) , pformat ( value ) ) ) return response
11,961
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/server.py#L359-L429
[ "def", "__set_clear_button_visibility", "(", "self", ",", "text", ")", ":", "if", "text", ":", "self", ".", "__clear_button", ".", "show", "(", ")", "else", ":", "self", ".", "__clear_button", ".", "hide", "(", ")" ]
This function will check to see if a given host with client uuid is currently registered .
def is_registered ( self , cuuid , host ) : # Check to see if the host with the client uuid exists in the registry # table. if ( cuuid in self . registry ) and ( self . registry [ cuuid ] [ "host" ] == host ) : return True else : return False
11,962
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/server.py#L432-L451
[ "def", "extract_tar", "(", "archive", ",", "compression", ",", "cmd", ",", "verbosity", ",", "interactive", ",", "outdir", ")", ":", "cmdlist", "=", "[", "cmd", ",", "'--extract'", "]", "add_tar_opts", "(", "cmdlist", ",", "compression", ",", "verbosity", ")", "cmdlist", ".", "extend", "(", "[", "\"--file\"", ",", "archive", ",", "'--directory'", ",", "outdir", "]", ")", "return", "cmdlist" ]
This function will process event packets and send them to legal checks .
def event ( self , cuuid , host , euuid , event_data , timestamp , priority ) : # Set the initial response to none response = None # If the host we're sending to is using encryption, get their key to # encrypt. if host in self . encrypted_hosts : logger . debug ( "Encrypted!" ) client_key = self . registry [ cuuid ] [ "encryption" ] else : logger . debug ( "Not encrypted :<" ) client_key = None # Get the port and host port = host [ 1 ] host = host [ 0 ] # First, we need to check if the request is coming from a registered # client. If it's not coming from a registered client, we tell them to # fuck off and register first. if not self . is_registered ( cuuid , host ) : logger . warning ( "<%s> Sending BYE EVENT: Client not registered." % cuuid ) response = serialize_data ( { "method" : "BYE EVENT" , "data" : "Not registered" } , self . compression , self . encryption , client_key ) return response # Check our stored event uuid's to see if we're already processing # this event. if euuid in self . event_uuids : logger . warning ( "<%s> Event ID is already being processed: %s" % ( cuuid , euuid ) ) # If we're already working on this event, return none so we do not # reply to the client return response # If we're not already processing this event, store the event uuid # until we receive a confirmation from the client that it received our # judgement. self . event_uuids [ euuid ] = 0 logger . debug ( "<%s> <euuid:%s> Currently processing events: " "%s" % ( cuuid , euuid , str ( self . event_uuids ) ) ) logger . debug ( "<%s> <euuid:%s> New event being processed" % ( cuuid , euuid ) ) logger . debug ( "<%s> <euuid:%s> Event Data: %s" % ( cuuid , euuid , pformat ( event_data ) ) ) # Send the event to the game middleware to determine if the event is # legal or not and to process the event in the Game Server if it is # legal. if self . middleware . event_legal ( cuuid , euuid , event_data ) : logger . debug ( "<%s> <euuid:%s> Event LEGAL. Sending judgement " "to client." 
% ( cuuid , euuid ) ) response = serialize_data ( { "method" : "LEGAL" , "euuid" : euuid , "priority" : priority } , self . compression , self . encryption , client_key ) # Execute the event thread = threading . Thread ( target = self . middleware . event_execute , args = ( cuuid , euuid , event_data ) ) thread . start ( ) else : logger . debug ( "<%s> <euuid:%s> Event ILLEGAL. Sending judgement " "to client." % ( cuuid , euuid ) ) response = serialize_data ( { "method" : "ILLEGAL" , "euuid" : euuid , "priority" : priority } , self . compression , self . encryption , client_key ) # Schedule a task to run in x seconds to check to see if we've timed # out in receiving a response from the client. self . listener . call_later ( self . timeout , self . retransmit , { "euuid" : euuid , "response" : response , "cuuid" : cuuid } ) return response
11,963
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/server.py#L454-L555
[ "def", "load", "(", "cls", ",", "fname", ",", "args", ")", ":", "if", "args", ".", "type", "==", "JSON", ":", "if", "fname", ".", "endswith", "(", "'.bz2'", ")", ":", "open_", "=", "bz2", ".", "open", "else", ":", "open_", "=", "open", "if", "args", ".", "progress", ":", "print", "(", "'Loading JSON data...'", ")", "with", "open_", "(", "fname", ",", "'rt'", ")", "as", "fp", ":", "storage", "=", "JsonStorage", ".", "load", "(", "fp", ")", "else", ":", "storage", "=", "SqliteStorage", ".", "load", "(", "fname", ")", "if", "args", ".", "settings", "is", "not", "None", ":", "extend", "(", "storage", ".", "settings", ",", "args", ".", "settings", ")", "return", "cls", ".", "from_storage", "(", "storage", ")" ]
This function will send a NOTIFY event to a registered client .
def notify ( self , cuuid , event_data ) : # Generate an event uuid for the notify event euuid = str ( uuid . uuid1 ( ) ) # If the client uses encryption, get their key to encrypt if "encryption" in self . registry [ cuuid ] : client_key = self . registry [ cuuid ] [ "encryption" ] else : client_key = None logger . debug ( "<%s> <%s> Sending NOTIFY event to client with event data: " "%s" % ( str ( cuuid ) , str ( euuid ) , pformat ( event_data ) ) ) # Look up the host details based on cuuid try : ip_address = self . registry [ cuuid ] [ "host" ] except KeyError : logger . warning ( "<%s> <%s> Host not found in registry! Transmit " "Canceled" % ( str ( cuuid ) , str ( euuid ) ) ) return False try : port = self . registry [ cuuid ] [ "port" ] except KeyError : logger . warning ( "<%s> <%s> Port not found! Transmit " "Canceled" % ( str ( cuuid ) , str ( euuid ) ) ) return False # Set up the packet and address to send to packet = serialize_data ( { "method" : "NOTIFY" , "event_data" : event_data , "euuid" : euuid } , self . compression , self . encryption , client_key ) address = ( ip_address , port ) # If we're not already processing this event, store the event uuid # until we receive a confirmation from the client that it received our # notification. self . event_uuids [ euuid ] = 0 # This is the current retry attempt logger . debug ( "<%s> Currently processing events: " "%s" % ( cuuid , pformat ( self . event_uuids ) ) ) logger . debug ( "<%s> New NOTIFY event being processed:" % cuuid ) logger . debug ( "<%s> EUUID: %s" % ( cuuid , euuid ) ) logger . debug ( "<%s> Event Data: %s" % ( cuuid , pformat ( event_data ) ) ) # Send the packet to the client self . listener . send_datagram ( packet , address ) # Schedule a task to run in x seconds to check to see if we've timed # out in receiving a response from the client/ self . listener . call_later ( self . timeout , self . retransmit , { "euuid" : euuid , "response" : packet , "cuuid" : cuuid } )
11,964
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/server.py#L558-L630
[ "def", "setOverlayTexelAspect", "(", "self", ",", "ulOverlayHandle", ",", "fTexelAspect", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTexelAspect", "result", "=", "fn", "(", "ulOverlayHandle", ",", "fTexelAspect", ")", "return", "result" ]
Used to do a mapping like event - > handle but handle is called just once upon event .
def once ( dispatcher , event , handle , * args ) : def shell ( dispatcher , * args ) : try : handle ( dispatcher , * args ) except Exception as e : raise e finally : dispatcher . del_map ( event , shell ) dispatcher . add_map ( event , shell , * args )
11,965
https://github.com/untwisted/untwisted/blob/8a8d9c8a8d0f3452d5de67cd760297bb5759f637/untwisted/wrappers.py#L7-L20
[ "def", "model_stats", "(", "self", ")", ":", "stats", "=", "self", ".", "model", ".", "default_stats", "return", "blob_data_to_dict", "(", "stats", ",", "self", ".", "_sampler", ".", "blobs", ")" ]
This is the reactor mainloop . It is intented to be called when a reactor is installed .
def mainloop ( self ) : while True : # It calls repeteadly the reactor # update method. try : self . update ( ) except Kill : # It breaks the loop # silently. # people implementing reactors from other mainloop # should implement this try: catch # suitably to their needs. break except KeyboardInterrupt : print ( self . base ) raise
11,966
https://github.com/untwisted/untwisted/blob/8a8d9c8a8d0f3452d5de67cd760297bb5759f637/untwisted/core.py#L41-L68
[ "def", "get_best_local_timezone", "(", ")", ":", "zone_name", "=", "tzlocal", ".", "get_localzone", "(", ")", ".", "zone", "if", "zone_name", "in", "pytz", ".", "all_timezones", ":", "return", "zone_name", "if", "time", ".", "daylight", ":", "local_offset", "=", "time", ".", "altzone", "localtz", "=", "time", ".", "tzname", "[", "1", "]", "else", ":", "local_offset", "=", "time", ".", "timezone", "localtz", "=", "time", ".", "tzname", "[", "0", "]", "local_offset", "=", "datetime", ".", "timedelta", "(", "seconds", "=", "-", "local_offset", ")", "for", "zone_name", "in", "pytz", ".", "all_timezones", ":", "timezone", "=", "pytz", ".", "timezone", "(", "zone_name", ")", "if", "not", "hasattr", "(", "timezone", ",", "'_tzinfos'", ")", ":", "continue", "for", "utcoffset", ",", "daylight", ",", "tzname", "in", "timezone", ".", "_tzinfos", ":", "if", "utcoffset", "==", "local_offset", "and", "tzname", "==", "localtz", ":", "return", "zone_name" ]
Start a DigidocService session
def start_session ( self , b_hold_session , sig_doc_xml = None , datafile = None ) : response = self . __invoke ( 'StartSession' , { 'bHoldSession' : b_hold_session , 'SigDocXML' : sig_doc_xml or SkipValue , 'datafile' : datafile or SkipValue , # This parameter is deprecated and exists only due to historical reasons. We need to specify it as # SkipValue to keep zeep happy 'SigningProfile' : SkipValue , } ) if response [ 'Sesscode' ] : self . data_files = [ ] self . session_code = response [ 'Sesscode' ] if sig_doc_xml : self . container = PreviouslyCreatedContainer ( ) return True # If b_hold_session is set to False, response will not contain a session # in case of errors, exceptions are raised from __invoke anyway return False
11,967
https://github.com/thorgate/django-esteid/blob/407ae513e357fedea0e3e42198df8eb9d9ff0646/esteid/digidocservice/service.py#L153-L179
[ "def", "bucket_and_path", "(", "self", ",", "url", ")", ":", "url", "=", "compat", ".", "as_str_any", "(", "url", ")", "if", "url", ".", "startswith", "(", "\"s3://\"", ")", ":", "url", "=", "url", "[", "len", "(", "\"s3://\"", ")", ":", "]", "idx", "=", "url", ".", "index", "(", "\"/\"", ")", "bucket", "=", "url", "[", ":", "idx", "]", "path", "=", "url", "[", "(", "idx", "+", "1", ")", ":", "]", "return", "bucket", ",", "path" ]
This can be used to add a signature to existing data files
def mobile_sign ( self , id_code , country , phone_nr , language = None , signing_profile = 'LT_TM' ) : if not ( self . container and isinstance ( self . container , PreviouslyCreatedContainer ) ) : assert self . data_files , 'To use MobileSign endpoint the application must ' 'add at least one data file to users session' response = self . __invoke ( 'MobileSign' , { 'SignerIDCode' : id_code , 'SignersCountry' : country , 'SignerPhoneNo' : phone_nr , 'Language' : self . parse_language ( language ) , 'Role' : SkipValue , 'City' : SkipValue , 'StateOrProvince' : SkipValue , 'PostalCode' : SkipValue , 'CountryName' : SkipValue , 'ServiceName' : self . service_name , 'AdditionalDataToBeDisplayed' : self . mobile_message , # Either LT or LT_TM, see: http://sk-eid.github.io/dds-documentation/api/api_docs/#mobilesign 'SigningProfile' : signing_profile , 'MessagingMode' : 'asynchClientServer' , 'AsyncConfiguration' : SkipValue , 'ReturnDocInfo' : SkipValue , 'ReturnDocData' : SkipValue , } ) return response
11,968
https://github.com/thorgate/django-esteid/blob/407ae513e357fedea0e3e42198df8eb9d9ff0646/esteid/digidocservice/service.py#L299-L334
[ "def", "connection_lost", "(", "self", ",", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "Exception", ")", ":", "logger", ".", "debug", "(", "'Connection to port `%s` lost: %s'", ",", "self", ".", "port", ",", "exception", ")", "else", ":", "logger", ".", "debug", "(", "'Connection to port `%s` closed'", ",", "self", ".", "port", ")", "self", ".", "connected", ".", "clear", "(", ")", "self", ".", "disconnected", ".", "set", "(", ")" ]
Test whether n is prime probabilisticly .
def is_prime ( n , k = 64 ) : if n == 2 : return True if n < 2 or n % 2 == 0 : return False for i in range ( 3 , 2048 ) : # performace optimisation if n % i == 0 : return False s = 0 d = n - 1 while True : q , r = divmod ( d , 2 ) if r == 1 : break s += 1 d = q for i in range ( k ) : a = random . randint ( 2 , n - 1 ) if check_candidate ( a , d , n , s ) : return False return True
11,969
https://github.com/sfstpala/pcr/blob/313ec17585565a0b9740f7b3f47d7a93bf37a7fc/pcr/maths.py#L34-L71
[ "def", "_ParseLogonApplications", "(", "self", ",", "parser_mediator", ",", "registry_key", ")", ":", "for", "application", "in", "self", ".", "_LOGON_APPLICATIONS", ":", "command_value", "=", "registry_key", ".", "GetValueByName", "(", "application", ")", "if", "not", "command_value", ":", "continue", "values_dict", "=", "{", "'Application'", ":", "application", ",", "'Command'", ":", "command_value", ".", "GetDataAsObject", "(", ")", ",", "'Trigger'", ":", "'Logon'", "}", "event_data", "=", "windows_events", ".", "WindowsRegistryEventData", "(", ")", "event_data", ".", "key_path", "=", "registry_key", ".", "path", "event_data", ".", "offset", "=", "registry_key", ".", "offset", "event_data", ".", "regvalue", "=", "values_dict", "event_data", ".", "source_append", "=", "': Winlogon'", "event", "=", "time_events", ".", "DateTimeValuesEvent", "(", "registry_key", ".", "last_written_time", ",", "definitions", ".", "TIME_DESCRIPTION_WRITTEN", ")", "parser_mediator", ".", "ProduceEventWithEventData", "(", "event", ",", "event_data", ")" ]
Return a random prime up to a certain length .
def get_prime ( bits , k = 64 ) : if bits % 8 != 0 or bits == 0 : raise ValueError ( "bits must be >= 0 and divisible by 8" ) while True : n = int . from_bytes ( os . urandom ( bits // 8 ) , "big" ) if is_prime ( n , k ) : return n
11,970
https://github.com/sfstpala/pcr/blob/313ec17585565a0b9740f7b3f47d7a93bf37a7fc/pcr/maths.py#L74-L86
[ "def", "remove_organization", "(", "self", ",", "service_desk_id", ",", "organization_id", ")", ":", "log", ".", "warning", "(", "'Removing organization...'", ")", "url", "=", "'rest/servicedeskapi/servicedesk/{}/organization'", ".", "format", "(", "service_desk_id", ")", "data", "=", "{", "'organizationId'", ":", "organization_id", "}", "return", "self", ".", "delete", "(", "url", ",", "headers", "=", "self", ".", "experimental_headers", ",", "data", "=", "data", ")" ]
Create RSA key pair .
def make_rsa_keys ( bits = 2048 , e = 65537 , k = 64 ) : p , q = None , None while p == q : p , q = get_prime ( bits // 2 ) , get_prime ( bits // 2 ) n = p * q phi_n = phi ( n , p , q ) d = mult_inv ( e , phi_n ) return n , e , d
11,971
https://github.com/sfstpala/pcr/blob/313ec17585565a0b9740f7b3f47d7a93bf37a7fc/pcr/maths.py#L119-L135
[ "def", "fill_sampling", "(", "slice_list", ",", "N", ")", ":", "A", "=", "[", "len", "(", "s", ".", "inliers", ")", "for", "s", "in", "slice_list", "]", "N_max", "=", "np", ".", "sum", "(", "A", ")", "if", "N", ">", "N_max", ":", "raise", "ValueError", "(", "\"Tried to draw {:d} samples from a pool of only {:d} items\"", ".", "format", "(", "N", ",", "N_max", ")", ")", "samples_from", "=", "np", ".", "zeros", "(", "(", "len", "(", "A", ")", ",", ")", ",", "dtype", "=", "'int'", ")", "# Number of samples to draw from each group", "remaining", "=", "N", "while", "remaining", ">", "0", ":", "remaining_groups", "=", "np", ".", "flatnonzero", "(", "samples_from", "-", "np", ".", "array", "(", "A", ")", ")", "if", "remaining", "<", "len", "(", "remaining_groups", ")", ":", "np", ".", "random", ".", "shuffle", "(", "remaining_groups", ")", "for", "g", "in", "remaining_groups", "[", ":", "remaining", "]", ":", "samples_from", "[", "g", "]", "+=", "1", "else", ":", "# Give each group the allowed number of samples. Constrain to their max size.", "to_each", "=", "max", "(", "1", ",", "int", "(", "remaining", "/", "len", "(", "remaining_groups", ")", ")", ")", "samples_from", "=", "np", ".", "min", "(", "np", ".", "vstack", "(", "(", "samples_from", "+", "to_each", ",", "A", ")", ")", ",", "axis", "=", "0", ")", "# Update remaining count", "remaining", "=", "int", "(", "N", "-", "np", ".", "sum", "(", "samples_from", ")", ")", "if", "not", "remaining", "==", "0", ":", "raise", "ValueError", "(", "\"Still {:d} samples left! 
This is an error in the selection.\"", ")", "# Construct index list of selected samples", "samples", "=", "[", "]", "for", "s", ",", "a", ",", "n", "in", "zip", "(", "slice_list", ",", "A", ",", "samples_from", ")", ":", "if", "a", "==", "n", ":", "samples", ".", "append", "(", "np", ".", "array", "(", "s", ".", "inliers", ")", ")", "# all", "elif", "a", "==", "0", ":", "samples", ".", "append", "(", "np", ".", "arange", "(", "[", "]", ")", ")", "else", ":", "chosen", "=", "np", ".", "random", ".", "choice", "(", "s", ".", "inliers", ",", "n", ",", "replace", "=", "False", ")", "samples", ".", "append", "(", "np", ".", "array", "(", "chosen", ")", ")", "return", "samples" ]
Register the extension with Sphinx .
def setup ( app ) : for name , ( default , rebuild , _ ) in ref . CONFIG_VALUES . iteritems ( ) : app . add_config_value ( name , default , rebuild ) app . add_directive ( 'javaimport' , ref . JavarefImportDirective ) app . add_role ( 'javaref' , ref . JavarefRole ( app ) ) app . connect ( 'builder-inited' , initialize_env ) app . connect ( 'env-purge-doc' , ref . purge_imports ) app . connect ( 'env-merge-info' , ref . merge_imports ) app . connect ( 'build-finished' , ref . cleanup )
11,972
https://github.com/bluekeyes/sphinx-javalink/blob/490e37506efa53e95ad88a665e347536e75b6254/javalink/__init__.py#L9-L25
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Purge expired values from the environment .
def validate_env ( app ) : if not hasattr ( app . env , 'javalink_config_cache' ) : app . env . javalink_config_cache = { } for conf_attr , ( _ , _ , env_attr ) in ref . CONFIG_VALUES . iteritems ( ) : if not env_attr : continue value = getattr ( app . config , conf_attr ) cached = app . env . javalink_config_cache . get ( conf_attr , value ) app . env . javalink_config_cache [ conf_attr ] = value if value != cached : app . verbose ( '[javalink] config.%s has changed, clearing related env' , conf_attr ) delattr ( app . env , env_attr )
11,973
https://github.com/bluekeyes/sphinx-javalink/blob/490e37506efa53e95ad88a665e347536e75b6254/javalink/__init__.py#L32-L58
[ "def", "correlator", "(", "A", ",", "B", ")", ":", "correlators", "=", "[", "]", "for", "i", "in", "range", "(", "len", "(", "A", ")", ")", ":", "correlator_row", "=", "[", "]", "for", "j", "in", "range", "(", "len", "(", "B", ")", ")", ":", "corr", "=", "0", "for", "k", "in", "range", "(", "len", "(", "A", "[", "i", "]", ")", ")", ":", "for", "l", "in", "range", "(", "len", "(", "B", "[", "j", "]", ")", ")", ":", "if", "k", "==", "l", ":", "corr", "+=", "A", "[", "i", "]", "[", "k", "]", "*", "B", "[", "j", "]", "[", "l", "]", "else", ":", "corr", "-=", "A", "[", "i", "]", "[", "k", "]", "*", "B", "[", "j", "]", "[", "l", "]", "correlator_row", ".", "append", "(", "corr", ")", "correlators", ".", "append", "(", "correlator_row", ")", "return", "correlators" ]
Find the path to the Java standard library jar .
def find_rt_jar ( javahome = None ) : if not javahome : if 'JAVA_HOME' in os . environ : javahome = os . environ [ 'JAVA_HOME' ] elif sys . platform == 'darwin' : # The default java binary on OS X is not part of a standard Oracle # install, so building paths relative to it does not work like it # does on other platforms. javahome = _find_osx_javahome ( ) else : javahome = _get_javahome_from_java ( _find_java_binary ( ) ) rtpath = os . path . join ( javahome , 'jre' , 'lib' , 'rt.jar' ) if not os . path . isfile ( rtpath ) : msg = 'Could not find rt.jar: {} is not a file' . format ( rtpath ) raise ExtensionError ( msg ) return rtpath
11,974
https://github.com/bluekeyes/sphinx-javalink/blob/490e37506efa53e95ad88a665e347536e75b6254/javalink/__init__.py#L61-L95
[ "async", "def", "throttle_update_heaters", "(", "self", ")", ":", "if", "(", "self", ".", "_throttle_time", "is", "not", "None", "and", "dt", ".", "datetime", ".", "now", "(", ")", "-", "self", ".", "_throttle_time", "<", "MIN_TIME_BETWEEN_UPDATES", ")", ":", "return", "self", ".", "_throttle_time", "=", "dt", ".", "datetime", ".", "now", "(", ")", "await", "self", ".", "update_heaters", "(", ")" ]
Returns True if the record shall be logged . False otherwise .
def filter ( self , record ) : found = self . _pattern . search ( record . getMessage ( ) ) return not found
11,975
https://github.com/blue-yonder/cee_syslog_handler/blob/c6006b59d38d4d8dabfc1301c689c71f35e3b8b8/cee_syslog_handler/__init__.py#L266-L273
[ "def", "external_metadata", "(", "self", ",", "datasource_type", "=", "None", ",", "datasource_id", "=", "None", ")", ":", "if", "datasource_type", "==", "'druid'", ":", "datasource", "=", "ConnectorRegistry", ".", "get_datasource", "(", "datasource_type", ",", "datasource_id", ",", "db", ".", "session", ")", "elif", "datasource_type", "==", "'table'", ":", "database", "=", "(", "db", ".", "session", ".", "query", "(", "Database", ")", ".", "filter_by", "(", "id", "=", "request", ".", "args", ".", "get", "(", "'db_id'", ")", ")", ".", "one", "(", ")", ")", "Table", "=", "ConnectorRegistry", ".", "sources", "[", "'table'", "]", "datasource", "=", "Table", "(", "database", "=", "database", ",", "table_name", "=", "request", ".", "args", ".", "get", "(", "'table_name'", ")", ",", "schema", "=", "request", ".", "args", ".", "get", "(", "'schema'", ")", "or", "None", ",", ")", "external_metadata", "=", "datasource", ".", "external_metadata", "(", ")", "return", "self", ".", "json_response", "(", "external_metadata", ")" ]
Get a value for key from obj if possible
def _get_value(obj, key):
    """Get a value for `key` from `obj` if possible.

    `obj` may be a sequence of candidate containers (first hit wins,
    via _find_value), a dict (.get lookup), or any other non-None
    object (attribute lookup). Returns None when nothing matches.
    """
    if isinstance(obj, (list, tuple)):
        for candidate in obj:
            found = _find_value(key, candidate)
            if found is not None:
                return found
        return None
    if isinstance(obj, dict):
        return obj.get(key)
    if obj is not None:
        return getattr(obj, key, None)
11,976
https://github.com/zsimic/runez/blob/14363b719a1aae1528859a501a22d075ce0abfcc/src/runez/convert.py#L262-L273
[ "def", "run_tornado", "(", "self", ",", "args", ")", ":", "server", "=", "self", "import", "tornado", ".", "ioloop", "import", "tornado", ".", "web", "import", "tornado", ".", "websocket", "ioloop", "=", "tornado", ".", "ioloop", ".", "IOLoop", ".", "current", "(", ")", "class", "DevWebSocketHandler", "(", "tornado", ".", "websocket", ".", "WebSocketHandler", ")", ":", "def", "open", "(", "self", ")", ":", "super", "(", "DevWebSocketHandler", ",", "self", ")", ".", "open", "(", ")", "server", ".", "on_open", "(", "self", ")", "def", "on_message", "(", "self", ",", "message", ")", ":", "server", ".", "on_message", "(", "self", ",", "message", ")", "def", "on_close", "(", "self", ")", ":", "super", "(", "DevWebSocketHandler", ",", "self", ")", ".", "on_close", "(", ")", "server", ".", "on_close", "(", "self", ")", "class", "MainHandler", "(", "tornado", ".", "web", ".", "RequestHandler", ")", ":", "def", "get", "(", "self", ")", ":", "self", ".", "write", "(", "server", ".", "index_page", ")", "#: Set the call later method", "server", ".", "call_later", "=", "ioloop", ".", "call_later", "server", ".", "add_callback", "=", "ioloop", ".", "add_callback", "app", "=", "tornado", ".", "web", ".", "Application", "(", "[", "(", "r\"/\"", ",", "MainHandler", ")", ",", "(", "r\"/dev\"", ",", "DevWebSocketHandler", ")", ",", "]", ")", "app", ".", "listen", "(", "self", ".", "port", ")", "print", "(", "\"Tornado Dev server started on {}\"", ".", "format", "(", "self", ".", "port", ")", ")", "ioloop", ".", "start", "(", ")" ]
Find a value for key in any of the objects given as args
def _find_value(key, *args):
    """Return the first non-None value found for `key` in any of the
    objects given as `args`; None when no object provides one."""
    for source in args:
        value = _get_value(source, key)
        if value is not None:
            return value
11,977
https://github.com/zsimic/runez/blob/14363b719a1aae1528859a501a22d075ce0abfcc/src/runez/convert.py#L276-L281
[ "def", "_getLPA", "(", "self", ")", ":", "return", "str", "(", "self", ".", "line", ")", "+", "\":\"", "+", "str", "(", "self", ".", "pos", ")", "+", "\":\"", "+", "str", "(", "self", ".", "absPosition", ")" ]
Adds a new search path from where modules can be loaded . This function is provided for test applications to add locations to the search path so any required functionality can be loaded . It helps keeping the step implementation modules simple by placing the bulk of the implementation in separate utility libraries . This function can also be used to add the application being tested to the path so its functionality can be made available for testing .
def add_search_path(*path_tokens):
    """Prepend the joined path tokens (as an absolute path) to sys.path
    unless the joined form is already present.

    Intended for test applications that need to pull in utility
    libraries, or the application under test, at runtime.
    """
    joined = os.path.join(*path_tokens)
    if joined in sys.path:
        return
    sys.path.insert(0, os.path.abspath(joined))
11,978
https://github.com/abantos/bolt/blob/8b6a911d4a7b1a6e870748a523c9b2b91997c773/bolt/_btutils.py#L8-L22
[ "def", "_get_all_gnupg_options", "(", ")", ":", "three_hundred_eighteen", "=", "(", "\"\"\"\n--allow-freeform-uid --multifile\n--allow-multiple-messages --no\n--allow-multisig-verification --no-allow-freeform-uid\n--allow-non-selfsigned-uid --no-allow-multiple-messages\n--allow-secret-key-import --no-allow-non-selfsigned-uid\n--always-trust --no-armor\n--armor --no-armour\n--armour --no-ask-cert-expire\n--ask-cert-expire --no-ask-cert-level\n--ask-cert-level --no-ask-sig-expire\n--ask-sig-expire --no-auto-check-trustdb\n--attribute-fd --no-auto-key-locate\n--attribute-file --no-auto-key-retrieve\n--auto-check-trustdb --no-batch\n--auto-key-locate --no-comments\n--auto-key-retrieve --no-default-keyring\n--batch --no-default-recipient\n--bzip2-compress-level --no-disable-mdc\n--bzip2-decompress-lowmem --no-emit-version\n--card-edit --no-encrypt-to\n--card-status --no-escape-from-lines\n--cert-digest-algo --no-expensive-trust-checks\n--cert-notation --no-expert\n--cert-policy-url --no-force-mdc\n--change-pin --no-force-v3-sigs\n--charset --no-force-v4-certs\n--check-sig --no-for-your-eyes-only\n--check-sigs --no-greeting\n--check-trustdb --no-groups\n--cipher-algo --no-literal\n--clearsign --no-mangle-dos-filenames\n--command-fd --no-mdc-warning\n--command-file --no-options\n--comment --no-permission-warning\n--completes-needed --no-pgp2\n--compress-algo --no-pgp6\n--compression-algo --no-pgp7\n--compress-keys --no-pgp8\n--compress-level --no-random-seed-file\n--compress-sigs --no-require-backsigs\n--ctapi-driver --no-require-cross-certification\n--dearmor --no-require-secmem\n--dearmour --no-rfc2440-text\n--debug --no-secmem-warning\n--debug-all --no-show-notation\n--debug-ccid-driver --no-show-photos\n--debug-level --no-show-policy-url\n--decrypt --no-sig-cache\n--decrypt-files --no-sig-create-check\n--default-cert-check-level --no-sk-comments\n--default-cert-expire --no-strict\n--default-cert-level --notation-data\n--default-comment 
--not-dash-escaped\n--default-key --no-textmode\n--default-keyserver-url --no-throw-keyid\n--default-preference-list --no-throw-keyids\n--default-recipient --no-tty\n--default-recipient-self --no-use-agent\n--default-sig-expire --no-use-embedded-filename\n--delete-keys --no-utf8-strings\n--delete-secret-and-public-keys --no-verbose\n--delete-secret-keys --no-version\n--desig-revoke --openpgp\n--detach-sign --options\n--digest-algo --output\n--disable-ccid --override-session-key\n--disable-cipher-algo --passphrase\n--disable-dsa2 --passphrase-fd\n--disable-mdc --passphrase-file\n--disable-pubkey-algo --passphrase-repeat\n--display --pcsc-driver\n--display-charset --personal-cipher-preferences\n--dry-run --personal-cipher-prefs\n--dump-options --personal-compress-preferences\n--edit-key --personal-compress-prefs\n--emit-version --personal-digest-preferences\n--enable-dsa2 --personal-digest-prefs\n--enable-progress-filter --pgp2\n--enable-special-filenames --pgp6\n--enarmor --pgp7\n--enarmour --pgp8\n--encrypt --photo-viewer\n--encrypt-files --pipemode\n--encrypt-to --preserve-permissions\n--escape-from-lines --primary-keyring\n--exec-path --print-md\n--exit-on-status-write-error --print-mds\n--expert --quick-random\n--export --quiet\n--export-options --reader-port\n--export-ownertrust --rebuild-keydb-caches\n--export-secret-keys --recipient\n--export-secret-subkeys --recv-keys\n--fast-import --refresh-keys\n--fast-list-mode --remote-user\n--fetch-keys --require-backsigs\n--fingerprint --require-cross-certification\n--fixed-list-mode --require-secmem\n--fix-trustdb --rfc1991\n--force-mdc --rfc2440\n--force-ownertrust --rfc2440-text\n--force-v3-sigs --rfc4880\n--force-v4-certs --run-as-shm-coprocess\n--for-your-eyes-only --s2k-cipher-algo\n--gen-key --s2k-count\n--gen-prime --s2k-digest-algo\n--gen-random --s2k-mode\n--gen-revoke --search-keys\n--gnupg --secret-keyring\n--gpg-agent-info --send-keys\n--gpgconf-list --set-filename\n--gpgconf-test --set-filesize\n--group 
--set-notation\n--help --set-policy-url\n--hidden-encrypt-to --show-keyring\n--hidden-recipient --show-notation\n--homedir --show-photos\n--honor-http-proxy --show-policy-url\n--ignore-crc-error --show-session-key\n--ignore-mdc-error --sig-keyserver-url\n--ignore-time-conflict --sign\n--ignore-valid-from --sign-key\n--import --sig-notation\n--import-options --sign-with\n--import-ownertrust --sig-policy-url\n--interactive --simple-sk-checksum\n--keyid-format --sk-comments\n--keyring --skip-verify\n--keyserver --status-fd\n--keyserver-options --status-file\n--lc-ctype --store\n--lc-messages --strict\n--limit-card-insert-tries --symmetric\n--list-config --temp-directory\n--list-key --textmode\n--list-keys --throw-keyid\n--list-only --throw-keyids\n--list-options --trustdb-name\n--list-ownertrust --trusted-key\n--list-packets --trust-model\n--list-public-keys --try-all-secrets\n--list-secret-keys --ttyname\n--list-sig --ttytype\n--list-sigs --ungroup\n--list-trustdb --update-trustdb\n--load-extension --use-agent\n--local-user --use-embedded-filename\n--lock-multiple --user\n--lock-never --utf8-strings\n--lock-once --verbose\n--logger-fd --verify\n--logger-file --verify-files\n--lsign-key --verify-options\n--mangle-dos-filenames --version\n--marginals-needed --warranty\n--max-cert-depth --with-colons\n--max-output --with-fingerprint\n--merge-only --with-key-data\n--min-cert-level --yes\n\"\"\"", ")", ".", "split", "(", ")", "# These are extra options which only exist for GnuPG>=2.0.0", "three_hundred_eighteen", ".", "append", "(", "'--export-ownertrust'", ")", "three_hundred_eighteen", ".", "append", "(", "'--import-ownertrust'", ")", "# These are extra options which only exist for GnuPG>=2.1.0", "three_hundred_eighteen", ".", "append", "(", "'--pinentry-mode'", ")", "three_hundred_eighteen", ".", "append", "(", "'--allow-loopback-pinentry'", ")", "gnupg_options", "=", "frozenset", "(", "three_hundred_eighteen", ")", "return", "gnupg_options" ]
Loads a python script as a module .
def load_script(filename):
    """Load a python script as a module and return it.

    The script's directory is pushed onto the search path first so the
    script's own imports can resolve.
    """
    directory, module_name, _extension = _extract_script_components(filename)
    add_search_path(directory)
    return _load_module(module_name)
11,979
https://github.com/abantos/bolt/blob/8b6a911d4a7b1a6e870748a523c9b2b91997c773/bolt/_btutils.py#L26-L40
[ "def", "_get_port_speed_price_id", "(", "items", ",", "port_speed", ",", "no_public", ",", "location", ")", ":", "for", "item", "in", "items", ":", "if", "utils", ".", "lookup", "(", "item", ",", "'itemCategory'", ",", "'categoryCode'", ")", "!=", "'port_speed'", ":", "continue", "# Check for correct capacity and if the item matches private only", "if", "any", "(", "[", "int", "(", "utils", ".", "lookup", "(", "item", ",", "'capacity'", ")", ")", "!=", "port_speed", ",", "_is_private_port_speed_item", "(", "item", ")", "!=", "no_public", ",", "not", "_is_bonded", "(", "item", ")", "]", ")", ":", "continue", "for", "price", "in", "item", "[", "'prices'", "]", ":", "if", "not", "_matches_location", "(", "price", ",", "location", ")", ":", "continue", "return", "price", "[", "'id'", "]", "raise", "SoftLayer", ".", "SoftLayerError", "(", "\"Could not find valid price for port speed: '%s'\"", "%", "port_speed", ")" ]
Parse all datetime . date and datetime . datetime columns
def parse_dates(df, inplace=True, *args, **kwargs):
    """Convert columns holding datetime.date / datetime.datetime objects
    to proper pandas datetime columns.

    A column qualifies when its first valid (non-null) entry is a date
    or datetime instance. Extra *args/**kwargs are forwarded to
    pd.to_datetime.

    Args:
        df: DataFrame to process
        inplace: when False, operate on a copy and return it

    Returns:
        the converted DataFrame when inplace is False, else None
    """
    if not inplace:
        df = df.copy()

    for c in df.columns:
        i = df[c].first_valid_index()
        # .loc replaces the .ix indexer, which was removed from pandas
        if i is not None and type(df[c].loc[i]) in (date, datetime):
            df[c] = pd.to_datetime(df[c], *args, **kwargs)

    if not inplace:
        return df
11,980
https://github.com/potash/drain/blob/ddd62081cb9317beb5d21f86c8b4bb196ca3d222/drain/util.py#L45-L58
[ "def", "removeApplicationManifest", "(", "self", ",", "pchApplicationManifestFullPath", ")", ":", "fn", "=", "self", ".", "function_table", ".", "removeApplicationManifest", "result", "=", "fn", "(", "pchApplicationManifestFullPath", ")", "return", "result" ]
Cast arguments to float32 numpy arrays; if there is more than one argument, return a list of arrays, otherwise a single array.
def to_float(*args):
    """Cast each argument to a float32 numpy array.

    With a single argument, the bare array is returned instead of a
    one-element list.
    """
    converted = [np.array(a, dtype=np.float32) for a in args]
    if len(converted) == 1:
        return converted[0]
    return converted
11,981
https://github.com/potash/drain/blob/ddd62081cb9317beb5d21f86c8b4bb196ca3d222/drain/util.py#L83-L89
[ "def", "guest_reboot", "(", "self", ",", "userid", ")", ":", "LOG", ".", "info", "(", "\"Begin to reboot vm %s\"", ",", "userid", ")", "self", ".", "_smtclient", ".", "guest_reboot", "(", "userid", ")", "LOG", ".", "info", "(", "\"Complete reboot vm %s\"", ",", "userid", ")" ]
get a class or function by name
def get_attr(name):
    """Resolve a dotted path like 'pkg.module.attr' to the named class
    or function object."""
    dot = name.rfind('.')
    attr_name = str(name[dot + 1:])
    module_path = str(name[:dot])
    module = __import__(module_path, fromlist=[attr_name])
    return getattr(module, attr_name)
11,982
https://github.com/potash/drain/blob/ddd62081cb9317beb5d21f86c8b4bb196ca3d222/drain/util.py#L131-L140
[ "def", "repair", "(", "self", ",", "volume_id_or_uri", ",", "timeout", "=", "-", "1", ")", ":", "data", "=", "{", "\"type\"", ":", "\"ExtraManagedStorageVolumePaths\"", ",", "\"resourceUri\"", ":", "self", ".", "_client", ".", "build_uri", "(", "volume_id_or_uri", ")", "}", "custom_headers", "=", "{", "'Accept-Language'", ":", "'en_US'", "}", "uri", "=", "self", ".", "URI", "+", "'/repair'", "return", "self", ".", "_client", ".", "create", "(", "data", ",", "uri", "=", "uri", ",", "timeout", "=", "timeout", ",", "custom_headers", "=", "custom_headers", ")" ]
Drop the levels of a multi-level column dataframe which are constant; operates in place.
def drop_constant_column_levels(df):
    """Drop the levels of a multi-level column header which are
    constant; operates in place."""
    cols = df.columns
    constant = [pos for pos, lvl in enumerate(cols.levels) if len(lvl) <= 1]
    # Drop from the highest position down so earlier positions stay valid.
    for pos in reversed(constant):
        cols = cols.droplevel(pos)
    df.columns = cols
11,983
https://github.com/potash/drain/blob/ddd62081cb9317beb5d21f86c8b4bb196ca3d222/drain/util.py#L214-L225
[ "def", "victim_assets", "(", "self", ",", "asset_type", "=", "None", ",", "asset_id", "=", "None", ")", ":", "type_entity_map", "=", "{", "'emailAddresses'", ":", "'victimEmailAddress'", ",", "'networkAccounts'", ":", "'victimNetworkAccount'", ",", "'phoneNumbers'", ":", "'victimPhone'", ",", "'socialNetworks'", ":", "'victimSocialNetwork'", ",", "'webSites'", ":", "'victimWebSite'", ",", "}", "resource", "=", "self", ".", "copy", "(", ")", "resource", ".", "_request_entity", "=", "'victimAsset'", "resource", ".", "_request_uri", "=", "'{}/victimAssets'", ".", "format", "(", "resource", ".", "_request_uri", ")", "if", "asset_type", "is", "not", "None", ":", "resource", ".", "_request_entity", "=", "type_entity_map", ".", "get", "(", "asset_type", ",", "'victimAsset'", ")", "resource", ".", "_request_uri", "=", "'{}/{}'", ".", "format", "(", "resource", ".", "_request_uri", ",", "asset_type", ")", "if", "asset_id", "is", "not", "None", ":", "resource", ".", "_request_uri", "=", "'{}/{}'", ".", "format", "(", "resource", ".", "_request_uri", ",", "asset_id", ")", "return", "resource" ]
Subset dictionaries to keys which map to multiple values
def dict_diff(dicts):
    """Subset each dict to the keys that map to multiple values: keys
    missing from some dict, or whose values differ across dicts."""
    varying = set()
    for key in union(set(d.keys()) for d in dicts):
        seen = []
        for d in dicts:
            if key not in d:
                varying.add(key)
                break
            seen.append(d[key])
            # Early exit as soon as two distinct values show up.
            if nunique(seen) > 1:
                varying.add(key)
                break
    return [dict_subset(d, varying) for d in dicts]
11,984
https://github.com/potash/drain/blob/ddd62081cb9317beb5d21f86c8b4bb196ca3d222/drain/util.py#L273-L291
[ "def", "remove_armor", "(", "armored_data", ")", ":", "stream", "=", "io", ".", "BytesIO", "(", "armored_data", ")", "lines", "=", "stream", ".", "readlines", "(", ")", "[", "3", ":", "-", "1", "]", "data", "=", "base64", ".", "b64decode", "(", "b''", ".", "join", "(", "lines", ")", ")", "payload", ",", "checksum", "=", "data", "[", ":", "-", "3", "]", ",", "data", "[", "-", "3", ":", "]", "assert", "util", ".", "crc24", "(", "payload", ")", "==", "checksum", "return", "payload" ]
Update a set-valued dictionary; when a key exists in both, union the sets.
def dict_update_union(d1, d2):
    """Merge the set-valued dict d2 into d1 in place; when a key exists
    in both, union the sets."""
    for key, values in d2.items():
        if key in d1:
            d1[key].update(values)
        else:
            d1[key] = values
11,985
https://github.com/potash/drain/blob/ddd62081cb9317beb5d21f86c8b4bb196ca3d222/drain/util.py#L350-L359
[ "def", "write_recovery", "(", "page", ",", "injList", ")", ":", "th", "=", "[", "''", "]", "+", "injList", "td", "=", "[", "]", "plots", "=", "[", "'sky_error_time'", ",", "'sky_error_mchirp'", ",", "'sky_error_distance'", "]", "text", "=", "{", "'sky_error_time'", ":", "'Sky error vs time'", ",", "'sky_error_mchirp'", ":", "'Sky error vs mchirp'", ",", "'sky_error_distance'", ":", "'Sky error vs distance'", "}", "for", "row", "in", "plots", ":", "pTag", "=", "text", "[", "row", "]", "d", "=", "[", "pTag", "]", "for", "inj", "in", "injList", ":", "plot", "=", "markup", ".", "page", "(", ")", "plot", "=", "markup", ".", "page", "(", ")", "p", "=", "\"%s/efficiency_OFFTRIAL_1/found_%s.png\"", "%", "(", "inj", ",", "row", ")", "plot", ".", "a", "(", "href", "=", "p", ",", "title", "=", "pTag", ")", "plot", ".", "img", "(", "src", "=", "p", ")", "plot", ".", "a", ".", "close", "(", ")", "d", ".", "append", "(", "plot", "(", ")", ")", "td", ".", "append", "(", "d", ")", "page", "=", "write_table", "(", "page", ",", "th", ",", "td", ")", "return", "page" ]
Process sass file .
def compile_file(self, infile, outfile, outdated=False, force=False):
    """Compile the sass file `infile` and write css to `outfile`.

    Output is human-readable under settings.DEBUG, compressed otherwise.
    `outdated` and `force` are accepted for compiler-interface
    compatibility but unused. Returns None.

    Fix: compile before opening the output, and write under a context
    manager — the original opened the file first, so a sass.compile
    failure leaked the handle and left a truncated output file.
    """
    if settings.DEBUG:
        css = sass.compile(filename=infile)
    else:
        css = sass.compile(filename=infile, output_style='compressed')
    with codecs.open(outfile, 'w', 'utf-8') as handle:
        handle.write(css)
11,986
https://github.com/sonic182/libsasscompiler/blob/067c2324bbed9d22966fe63d87e5b3687510bc26/libsasscompiler/__init__.py#L22-L31
[ "def", "disconnect", "(", "self", ",", "receiver", ")", ":", "if", "receiver", "not", "in", "self", ".", "receivers", ".", "keys", "(", ")", ":", "raise", "Exception", "(", "\"No receiver %s was registered\"", "%", "receiver", ")", "self", ".", "receivers", "[", "receiver", "]", ".", "disconnect", "(", ")", "del", "(", "self", ".", "receivers", "[", "receiver", "]", ")", "self", ".", "__log", ".", "debug", "(", "\"Receiver %s disconnected\"", "%", "receiver", ")" ]
Create a new target representing a task and its parameters
def from_task(cls, task):
    """Create a new target representing a task and its parameters."""
    task_name = task.get_name()
    task_params = task.get_param_string()
    return cls(name=task_name, params=task_params)
11,987
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/etl/targets.py#L21-L34
[ "def", "store_tokens", "(", "self", ",", "access_token", ",", "id_token", ")", ":", "session", "=", "self", ".", "request", ".", "session", "if", "self", ".", "get_settings", "(", "'OIDC_STORE_ACCESS_TOKEN'", ",", "False", ")", ":", "session", "[", "'oidc_access_token'", "]", "=", "access_token", "if", "self", ".", "get_settings", "(", "'OIDC_STORE_ID_TOKEN'", ",", "False", ")", ":", "session", "[", "'oidc_id_token'", "]", "=", "id_token" ]
Base query for a target .
def _base_query(self, session):
    """Base query selecting this target's marker rows, filtered by
    both name and parameter string."""
    query = session.query(ORMTargetMarker)
    query = query.filter(ORMTargetMarker.name == self.name)
    return query.filter(ORMTargetMarker.params == self.params)
11,988
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/etl/targets.py#L36-L44
[ "def", "compose", "(", "list_of_files", ",", "destination_file", ",", "files_metadata", "=", "None", ",", "content_type", "=", "None", ",", "retry_params", "=", "None", ",", "_account_id", "=", "None", ")", ":", "api", "=", "storage_api", ".", "_get_storage_api", "(", "retry_params", "=", "retry_params", ",", "account_id", "=", "_account_id", ")", "if", "os", ".", "getenv", "(", "'SERVER_SOFTWARE'", ")", ".", "startswith", "(", "'Dev'", ")", ":", "def", "_temp_func", "(", "file_list", ",", "destination_file", ",", "content_type", ")", ":", "bucket", "=", "'/'", "+", "destination_file", ".", "split", "(", "'/'", ")", "[", "1", "]", "+", "'/'", "with", "open", "(", "destination_file", ",", "'w'", ",", "content_type", "=", "content_type", ")", "as", "gcs_merge", ":", "for", "source_file", "in", "file_list", ":", "with", "open", "(", "bucket", "+", "source_file", "[", "'Name'", "]", ",", "'r'", ")", "as", "gcs_source", ":", "gcs_merge", ".", "write", "(", "gcs_source", ".", "read", "(", ")", ")", "compose_object", "=", "_temp_func", "else", ":", "compose_object", "=", "api", ".", "compose_object", "file_list", ",", "_", "=", "_validate_compose_list", "(", "destination_file", ",", "list_of_files", ",", "files_metadata", ",", "32", ")", "compose_object", "(", "file_list", ",", "destination_file", ",", "content_type", ")" ]
Check if a target exists
def exists(self):
    """Check if a marker for this target exists in the database."""
    # get DB connection
    session = client.get_client().create_session()
    marker_count = self._base_query(session).count()
    session.close()
    return marker_count > 0
11,989
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/etl/targets.py#L46-L63
[ "def", "_unwrap_result", "(", "action", ",", "result", ")", ":", "if", "not", "result", ":", "return", "elif", "action", "in", "{", "'DeleteItem'", ",", "'PutItem'", ",", "'UpdateItem'", "}", ":", "return", "_unwrap_delete_put_update_item", "(", "result", ")", "elif", "action", "==", "'GetItem'", ":", "return", "_unwrap_get_item", "(", "result", ")", "elif", "action", "==", "'Query'", "or", "action", "==", "'Scan'", ":", "return", "_unwrap_query_scan", "(", "result", ")", "elif", "action", "==", "'CreateTable'", ":", "return", "_unwrap_create_table", "(", "result", ")", "elif", "action", "==", "'DescribeTable'", ":", "return", "_unwrap_describe_table", "(", "result", ")", "return", "result" ]
Create an instance of the current target in the database
def create(self):
    """Record this target in the database, unless a marker with the
    same name and params already exists."""
    session = client.get_client().create_session()
    already_present = self._base_query(session).count() > 0
    if not already_present:
        # store a new target instance to the database
        session.add(ORMTargetMarker(name=self.name, params=self.params))
        session.commit()
    session.close()
11,990
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/etl/targets.py#L65-L79
[ "def", "subvolume_sync", "(", "path", ",", "subvolids", "=", "None", ",", "sleep", "=", "None", ")", ":", "if", "subvolids", "and", "type", "(", "subvolids", ")", "is", "not", "list", ":", "raise", "CommandExecutionError", "(", "'Subvolids parameter must be a list'", ")", "cmd", "=", "[", "'btrfs'", ",", "'subvolume'", ",", "'sync'", "]", "if", "sleep", ":", "cmd", ".", "extend", "(", "[", "'-s'", ",", "sleep", "]", ")", "cmd", ".", "append", "(", "path", ")", "if", "subvolids", ":", "cmd", ".", "extend", "(", "subvolids", ")", "res", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "salt", ".", "utils", ".", "fsutils", ".", "_verify_run", "(", "res", ")", "return", "True" ]
Remove a target
def remove(self):
    """Delete this target's marker from the database.

    Raises:
        RuntimeError: if no matching marker exists.
    """
    session = client.get_client().create_session()
    if self._base_query(session).count() == 0:
        session.close()
        raise RuntimeError(
            "Target does not exist, name={:s}, params={:s}".format(self.name, self.params))
    # remove the target from the database
    self._base_query(session).delete()
    session.commit()
    session.close()
11,991
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/ozelot/etl/targets.py#L81-L97
[ "def", "_psturng", "(", "q", ",", "r", ",", "v", ")", ":", "if", "q", "<", "0.", ":", "raise", "ValueError", "(", "'q should be >= 0'", ")", "opt_func", "=", "lambda", "p", ",", "r", ",", "v", ":", "abs", "(", "_qsturng", "(", "p", ",", "r", ",", "v", ")", "-", "q", ")", "if", "v", "==", "1", ":", "if", "q", "<", "_qsturng", "(", ".9", ",", "r", ",", "1", ")", ":", "return", ".1", "elif", "q", ">", "_qsturng", "(", ".999", ",", "r", ",", "1", ")", ":", "return", ".001", "return", "1.", "-", "fminbound", "(", "opt_func", ",", ".9", ",", ".999", ",", "args", "=", "(", "r", ",", "v", ")", ")", "else", ":", "if", "q", "<", "_qsturng", "(", ".1", ",", "r", ",", "v", ")", ":", "return", ".9", "elif", "q", ">", "_qsturng", "(", ".999", ",", "r", ",", "v", ")", ":", "return", ".001", "return", "1.", "-", "fminbound", "(", "opt_func", ",", ".1", ",", ".999", ",", "args", "=", "(", "r", ",", "v", ")", ")" ]
Extend the table with uppercase options
def add_uppercase(table):
    """Return a copy of `table` extended with capitalized variants of
    every key/value pair."""
    extended = table.copy()
    extended.update({k.capitalize(): v.capitalize() for k, v in table.items()})
    return extended
11,992
https://github.com/dchaplinsky/translit-ua/blob/14e634492c7ce937d77436772fa32d2de5707a9b/translitua/translit.py#L12-L35
[ "def", "_wait_for_files", "(", "path", ")", ":", "timeout", "=", "0.001", "remaining", "=", "[", "]", "while", "timeout", "<", "1.0", ":", "remaining", "=", "[", "]", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "L", "=", "os", ".", "listdir", "(", "path", ")", "for", "target", "in", "L", ":", "_remaining", "=", "_wait_for_files", "(", "target", ")", "if", "_remaining", ":", "remaining", ".", "extend", "(", "_remaining", ")", "continue", "try", ":", "os", ".", "unlink", "(", "path", ")", "except", "FileNotFoundError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "return", "except", "(", "OSError", ",", "IOError", ",", "PermissionError", ")", ":", "time", ".", "sleep", "(", "timeout", ")", "timeout", "*=", "2", "remaining", ".", "append", "(", "path", ")", "else", ":", "return", "return", "remaining" ]
Transliterates given unicode src text to a transliterated variant according to a given transliteration table. Official Ukrainian transliteration is used by default.
def translit(src, table=UkrainianKMU, preserve_case=True):
    """Transliterate unicode text `src` according to `table` (official
    Ukrainian transliteration by default).

    When `preserve_case` is set and the whole input is uppercase, the
    result is uppercased too (multi-letter substitutions would
    otherwise come out mixed-case).
    """
    src = text_type(src)
    was_all_upper = src.isupper()

    # Optional per-table preprocessing stages, applied before the main
    # character map: deletions, special multi-char cases, then
    # word-initial characters.
    if hasattr(table, "DELETE_PATTERN"):
        src = table.DELETE_PATTERN.sub(u"", src)
    if hasattr(table, "PATTERN1"):
        src = table.PATTERN1.sub(lambda m: table.SPECIAL_CASES[m.group()], src)
    if hasattr(table, "PATTERN2"):
        src = table.PATTERN2.sub(lambda m: table.FIRST_CHARACTERS[m.group()], src)

    result = src.translate(table.MAIN_TRANSLIT_TABLE)
    if was_all_upper and preserve_case:
        return result.upper()
    return result
11,993
https://github.com/dchaplinsky/translit-ua/blob/14e634492c7ce937d77436772fa32d2de5707a9b/translitua/translit.py#L1043-L1156
[ "async", "def", "stop", "(", "self", ",", "*", "*", "kwargs", ")", ":", "_LOGGER", ".", "debug", "(", "'Shutting down pairing server'", ")", "if", "self", ".", "_web_server", "is", "not", "None", ":", "await", "self", ".", "_web_server", ".", "shutdown", "(", ")", "self", ".", "_server", ".", "close", "(", ")", "if", "self", ".", "_server", "is", "not", "None", ":", "await", "self", ".", "_server", ".", "wait_closed", "(", ")" ]
Store entities and their attributes
def store(self, df, attribute_columns):
    """Store entities and their attributes.

    Appends one Entity row per row of `df` and, for each column in
    `attribute_columns`, one Attribute row per entity. NOTE: mutates
    `df` by adding 'id' and 'type' columns.

    Args:
        df: DataFrame of entities; one attribute per listed column
        attribute_columns: column names of `df` to store as attributes
    """
    # ID start values depend on currently stored entities/attributes!
    entity_id_start = models.Entity.get_max_id(self.session) + 1
    attribute_id_start = models.Attribute.get_max_id(self.session) + 1

    # append ID and type columns; each df row becomes one entity
    df['id'] = range(entity_id_start, entity_id_start + len(df))
    df['type'] = self.type

    # store entities
    df[['id', 'type']].to_sql(name=models.Entity.__tablename__,
                              con=self.client.engine,
                              if_exists='append',
                              index=False)

    # store attributes, one table write per attribute column
    for col in attribute_columns:
        # ID column of df is the entity ID of the attribute
        attr_df = df[[col, 'id']].rename(columns={'id': 'entity_id', col: 'value'})
        attr_df['name'] = col

        # assign fresh attribute IDs, continuing past already existing ones
        attr_df['id'] = range(attribute_id_start, attribute_id_start + len(df))
        attribute_id_start += len(df)

        # store
        attr_df.to_sql(name=models.Attribute.__tablename__,
                       con=self.client.engine,
                       if_exists='append',
                       index=False)
11,994
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/examples/leonardo/leonardo/kvstore/pipeline.py#L49-L86
[ "def", "multi_load_data_custom", "(", "Channel", ",", "TraceTitle", ",", "RunNos", ",", "directoryPath", "=", "'.'", ",", "calcPSD", "=", "True", ",", "NPerSegmentPSD", "=", "1000000", ")", ":", "# files = glob('{}/*'.format(directoryPath))", "# files_CorrectChannel = []", "# for file_ in files:", "# if 'C{}'.format(Channel) in file_:", "# files_CorrectChannel.append(file_)", "# files_CorrectRunNo = []", "# for RunNo in RunNos:", "# files_match = _fnmatch.filter(", "# files_CorrectChannel, '*C{}'.format(Channel)+TraceTitle+str(RunNo).zfill(5)+'.*')", "# for file_ in files_match:", "# files_CorrectRunNo.append(file_)", "matching_files", "=", "search_data_custom", "(", "Channel", ",", "TraceTitle", ",", "RunNos", ",", "directoryPath", ")", "cpu_count", "=", "_cpu_count", "(", ")", "workerPool", "=", "_Pool", "(", "cpu_count", ")", "# for filepath in files_CorrectRepeatNo:", "# print(filepath)", "# data.append(load_data(filepath))", "load_data_partial", "=", "_partial", "(", "load_data", ",", "calcPSD", "=", "calcPSD", ",", "NPerSegmentPSD", "=", "NPerSegmentPSD", ")", "data", "=", "workerPool", ".", "map", "(", "load_data_partial", ",", "matching_files", ")", "workerPool", ".", "close", "(", ")", "workerPool", ".", "terminate", "(", ")", "workerPool", ".", "join", "(", ")", "return", "data" ]
Load all paintings into the database
def run(self):
    """Load all paintings into the database."""
    paintings = PaintingsInputData().load()

    # normalize the label column name
    paintings.rename(columns={'paintingLabel': 'name'}, inplace=True)

    # map each painting's creator wiki ID to the stored artist entity ID
    artists = models.Entity.query_with_attributes('artist', self.client)
    wiki_to_entity_id = artists.set_index('wiki_id')['id']
    paintings['artist_id'] = paintings['creator_wiki_id'].map(wiki_to_entity_id)

    # store entities plus the listed attribute columns, then mark done
    self.store(paintings, ['name', 'wiki_id', 'area', 'decade', 'artist_id'])
    self.done()
11,995
https://github.com/ehansis/ozelot/blob/948675e02eb6fca940450f5cb814f53e97159e5b/examples/leonardo/leonardo/kvstore/pipeline.py#L149-L168
[ "def", "update_experiment", "(", ")", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "if", "not", "experiment_dict", ":", "return", "None", "for", "key", "in", "experiment_dict", ".", "keys", "(", ")", ":", "if", "isinstance", "(", "experiment_dict", "[", "key", "]", ",", "dict", ")", ":", "if", "experiment_dict", "[", "key", "]", ".", "get", "(", "'status'", ")", "!=", "'STOPPED'", ":", "nni_config", "=", "Config", "(", "experiment_dict", "[", "key", "]", "[", "'fileName'", "]", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "not", "detect_process", "(", "rest_pid", ")", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "'STOPPED'", ")", "continue", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "startTime", ",", "endTime", "=", "get_experiment_time", "(", "rest_port", ")", "if", "startTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'startTime'", ",", "startTime", ")", "if", "endTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'endTime'", ",", "endTime", ")", "status", "=", "get_experiment_status", "(", "rest_port", ")", "if", "status", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "status", ")" ]
Serializes normal Python datatypes into plaintext using json .
def serialize_data(data, compression=False, encryption=False, public_key=None):
    """Serialize Python data into bytes for transmission, using JSON.

    Fixes the Python-3 breakage in the original: zlib.compress was
    called on a str, and str.encode() was applied to the bytes result
    of b2a_base64.

    Args:
        data: JSON-serializable Python data.
        compression: when truthy, zlib-compress and base64-armor the payload.
        encryption: falsy, or an object exposing encrypt(message, public_key).
        public_key: key handed to the encryption object.

    Returns:
        the encoded message as bytes.
    """
    message = json.dumps(data)

    if compression:
        # zlib works on bytes; base64-armor keeps the payload text-safe.
        compressed = zlib.compress(message.encode('utf-8'))
        message = binascii.b2a_base64(compressed).decode('ascii')

    if encryption and public_key:
        message = encryption.encrypt(message, public_key)

    # Hand back bytes regardless of which branches ran.
    if isinstance(message, bytes):
        return message
    return message.encode('utf-8')
11,996
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/core.py#L44-L76
[ "def", "_check_requirements", "(", "self", ")", ":", "if", "not", "self", ".", "_path", ":", "raise", "IOUError", "(", "\"IOU image is not configured\"", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "self", ".", "_path", ")", "or", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_path", ")", ":", "if", "os", ".", "path", ".", "islink", "(", "self", ".", "_path", ")", ":", "raise", "IOUError", "(", "\"IOU image '{}' linked to '{}' is not accessible\"", ".", "format", "(", "self", ".", "_path", ",", "os", ".", "path", ".", "realpath", "(", "self", ".", "_path", ")", ")", ")", "else", ":", "raise", "IOUError", "(", "\"IOU image '{}' is not accessible\"", ".", "format", "(", "self", ".", "_path", ")", ")", "try", ":", "with", "open", "(", "self", ".", "_path", ",", "\"rb\"", ")", "as", "f", ":", "# read the first 7 bytes of the file.", "elf_header_start", "=", "f", ".", "read", "(", "7", ")", "except", "OSError", "as", "e", ":", "raise", "IOUError", "(", "\"Cannot read ELF header for IOU image '{}': {}\"", ".", "format", "(", "self", ".", "_path", ",", "e", ")", ")", "# IOU images must start with the ELF magic number, be 32-bit or 64-bit, little endian", "# and have an ELF version of 1 normal IOS image are big endian!", "if", "elf_header_start", "!=", "b'\\x7fELF\\x01\\x01\\x01'", "and", "elf_header_start", "!=", "b'\\x7fELF\\x02\\x01\\x01'", ":", "raise", "IOUError", "(", "\"'{}' is not a valid IOU image\"", ".", "format", "(", "self", ".", "_path", ")", ")", "if", "not", "os", ".", "access", "(", "self", ".", "_path", ",", "os", ".", "X_OK", ")", ":", "raise", "IOUError", "(", "\"IOU image '{}' is not executable\"", ".", "format", "(", "self", ".", "_path", ")", ")" ]
Unserializes the packet data and converts it from json format to normal Python datatypes .
def unserialize_data(data, compression=False, encryption=False):
    """Turn wire bytes back into Python datatypes.

    Mirrors ``serialize_data``: decryption first, then base64 + zlib
    decompression, then JSON decoding.

    Args:
        data: Raw message bytes (or str) received from the network.
        compression: If True, the payload is base64-encoded zlib data.
        encryption: Decryption backend exposing ``decrypt(data)``; falsy
            (the default) to skip decryption.

    Returns:
        The decoded Python object, or False if any stage failed.
    """
    if encryption:
        try:
            data = encryption.decrypt(data)
        except Exception as err:
            logger.error("Decryption Error: " + str(err))
            # Bail out instead of feeding undecrypted bytes to the next
            # stage; the original fell through and could return an unbound
            # `message` (NameError).
            return False
    if compression:
        try:
            data = zlib.decompress(binascii.a2b_base64(data))
        except Exception as err:
            logger.error("Decompression Error: " + str(err))
            return False
    # Only decode when we actually hold bytes; the original called
    # data.decode() unconditionally and outside any try block, so a str
    # payload or undecodable bytes crashed the listener.
    if isinstance(data, bytes):
        data = data.decode()
    try:
        return json.loads(data)
    except Exception as err:
        logger.error("Deserialization Error: " + str(err))
        return False
11,997
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/core.py#L78-L119
[ "def", "_file_watcher", "(", "state", ")", ":", "conf", "=", "state", ".", "app", ".", "config", "file_path", "=", "conf", ".", "get", "(", "'WAFFLE_WATCHER_FILE'", ",", "'/tmp/waffleconf.txt'", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "file_path", ")", ":", "# Create watch file", "open", "(", "file_path", ",", "'a'", ")", ".", "close", "(", ")", "while", "True", ":", "tstamp", "=", "os", ".", "path", ".", "getmtime", "(", "file_path", ")", "# Compare timestamps and update config if needed", "if", "tstamp", ">", "state", ".", "_tstamp", ":", "state", ".", "update_conf", "(", ")", "state", ".", "_tstamp", "=", "tstamp", "# Not too critical", "time", ".", "sleep", "(", "10", ")" ]
Starts the listen loop . If threading is enabled , the loop is started in its own daemon thread alongside the scheduler thread .
def listen(self):
    """Start the listen loop.

    With ``self.threading`` enabled, the listen loop and the scheduler each
    run in their own daemon thread (stored on ``self.listen_thread`` and
    ``self.scheduler_thread``); otherwise the listen loop runs inline and
    blocks the caller.
    """
    self.listening = True
    # Non-threaded mode: run the loop directly on the caller's thread.
    if not self.threading:
        self.listen_loop()
        return
    from threading import Thread
    self.listen_thread = Thread(target=self.listen_loop)
    self.listen_thread.daemon = True
    self.listen_thread.start()
    self.scheduler_thread = Thread(target=self.scheduler)
    self.scheduler_thread.daemon = True
    self.scheduler_thread.start()
11,998
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/core.py#L192-L216
[ "def", "from_data", "(", "cls", ",", "blob", ")", ":", "version", ",", "data", "=", "decompress_datablob", "(", "DATA_BLOB_MAGIC_RETRY", ",", "blob", ")", "if", "version", "==", "1", ":", "for", "clazz", "in", "cls", ".", "_all_subclasses", "(", ")", ":", "if", "clazz", ".", "__name__", "==", "data", "[", "\"_class_name\"", "]", ":", "return", "clazz", ".", "_from_data_v1", "(", "data", ")", "raise", "Exception", "(", "\"Invalid data blob data or version\"", ")" ]
Starts the listen loop and executes the receive_datagram method whenever a packet is received .
def listen_loop(self):
    """Receive datagrams until ``self.listening`` goes False.

    Each received packet is handed to ``self.receive_datagram(data,
    address)``. Connection-reset errors are logged and ignored so one
    misbehaving peer cannot kill the listener; any other socket error is
    re-raised.
    """
    while self.listening:
        try:
            data, address = self.sock.recvfrom(self.bufsize)
            self.receive_datagram(data, address)
            if self.stats_enabled:
                # Key spelling is kept as-is ('bytes_recieved'): it is the
                # externally visible stats-dict key other code reads.
                self.stats['bytes_recieved'] += len(data)
        except socket.error as error:
            # errno.WSAECONNRESET only exists on Windows; the original
            # raised AttributeError here on POSIX. Fall back to the
            # portable ECONNRESET code.
            if error.errno == getattr(errno, 'WSAECONNRESET', errno.ECONNRESET):
                logger.info("connection reset")
            else:
                raise
    logger.info("Shutting down the listener...")
11,999
https://github.com/ShadowBlip/Neteria/blob/1a8c976eb2beeca0a5a272a34ac58b2c114495a4/neteria/core.py#L218-L242
[ "def", "QRatio", "(", "s1", ",", "s2", ",", "force_ascii", "=", "True", ",", "full_process", "=", "True", ")", ":", "if", "full_process", ":", "p1", "=", "utils", ".", "full_process", "(", "s1", ",", "force_ascii", "=", "force_ascii", ")", "p2", "=", "utils", ".", "full_process", "(", "s2", ",", "force_ascii", "=", "force_ascii", ")", "else", ":", "p1", "=", "s1", "p2", "=", "s2", "if", "not", "utils", ".", "validate_string", "(", "p1", ")", ":", "return", "0", "if", "not", "utils", ".", "validate_string", "(", "p2", ")", ":", "return", "0", "return", "ratio", "(", "p1", ",", "p2", ")" ]