query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Retrieves all episodes for a particular series given its TheTVDB id, filtered by additional optional details.
def get_series_episodes(self, series_id, episode_number=None, aired_season=None, aired_episode=None,
                        dvd_season=None, dvd_episode=None, imdb_id=None, page=1):
    """Retrieve all episodes of a series, filtered by the optional query parameters.

    :param series_id: TheTVDB id of the series.
    :param episode_number: optional absolute episode number filter.
    :param aired_season: optional aired-season filter.
    :param aired_episode: optional aired-episode filter.
    :param dvd_season: optional DVD-season filter.
    :param dvd_episode: optional DVD-episode filter.
    :param imdb_id: optional IMDB id filter.
    :param page: result page to fetch (defaults to 1).
    :return: the parsed API response.
    """
    # locals() must be captured before any new local is bound so the
    # helper can look argument values up by parameter name.
    arguments = locals()
    param_map = {
        'episode_number': 'absoluteNumber',
        'aired_season': 'airedSeason',
        'aired_episode': 'airedEpisode',
        'dvd_season': 'dvdSeason',
        'dvd_episode': 'dvdEpisode',
        'imdb_id': 'imdbId',
        'page': 'page',
    }
    qs = utils.query_param_string_from_option_args(param_map, arguments)
    url = self.API_BASE_URL + '/series/%d/episodes/query?%s' % (series_id, qs)
    response = requests_util.run_request('get', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(response)
6,900
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L167-L194
[ "def", "set_domain_workgroup", "(", "workgroup", ")", ":", "if", "six", ".", "PY2", ":", "workgroup", "=", "_to_unicode", "(", "workgroup", ")", "# Initialize COM", "with", "salt", ".", "utils", ".", "winapi", ".", "Com", "(", ")", ":", "# Grab the first Win32_ComputerSystem object from wmi", "conn", "=", "wmi", ".", "WMI", "(", ")", "comp", "=", "conn", ".", "Win32_ComputerSystem", "(", ")", "[", "0", "]", "# Now we can join the new workgroup", "res", "=", "comp", ".", "JoinDomainOrWorkgroup", "(", "Name", "=", "workgroup", ".", "upper", "(", ")", ")", "return", "True", "if", "not", "res", "[", "0", "]", "else", "False" ]
Retrieves a list of series that have changed on TheTVDB since a provided from-time parameter and optionally up to a specified to-time.
def get_updated(self, from_time, to_time=None):
    """Retrieve the series that have changed on TheTVDB since ``from_time``.

    :param from_time: epoch timestamp marking the start of the window.
    :param to_time: optional epoch timestamp marking the end of the window.
    :return: the parsed API response.
    """
    arguments = locals()
    optional_parameters = {'to_time': 'toTime'}
    query_string = 'fromTime=%s&%s' % (
        from_time, utils.query_param_string_from_option_args(optional_parameters, arguments))
    # BUG FIX: the endpoint was misspelled '/uodated' — TheTVDB APIv2
    # exposes '/updated/query', so the old URL always returned 404.
    raw_response = requests_util.run_request('get',
                                             self.API_BASE_URL + '/updated/query?%s' % query_string,
                                             headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
6,901
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L252-L271
[ "def", "scrnaseq_concatenate_metadata", "(", "samples", ")", ":", "barcodes", "=", "{", "}", "counts", "=", "\"\"", "metadata", "=", "{", "}", "has_sample_barcodes", "=", "False", "for", "sample", "in", "dd", ".", "sample_data_iterator", "(", "samples", ")", ":", "if", "dd", ".", "get_sample_barcodes", "(", "sample", ")", ":", "has_sample_barcodes", "=", "True", "with", "open", "(", "dd", ".", "get_sample_barcodes", "(", "sample", ")", ")", "as", "inh", ":", "for", "line", "in", "inh", ":", "cols", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "\",\"", ")", "if", "len", "(", "cols", ")", "==", "1", ":", "# Assign sample name in case of missing in barcodes", "cols", ".", "append", "(", "\"NaN\"", ")", "barcodes", "[", "(", "dd", ".", "get_sample_name", "(", "sample", ")", ",", "cols", "[", "0", "]", ")", "]", "=", "cols", "[", "1", ":", "]", "else", ":", "barcodes", "[", "(", "dd", ".", "get_sample_name", "(", "sample", ")", ",", "\"NaN\"", ")", "]", "=", "[", "dd", ".", "get_sample_name", "(", "sample", ")", ",", "\"NaN\"", "]", "counts", "=", "dd", ".", "get_combined_counts", "(", "sample", ")", "meta", "=", "map", "(", "str", ",", "list", "(", "sample", "[", "\"metadata\"", "]", ".", "values", "(", ")", ")", ")", "meta_cols", "=", "list", "(", "sample", "[", "\"metadata\"", "]", ".", "keys", "(", ")", ")", "meta", "=", "[", "\"NaN\"", "if", "not", "v", "else", "v", "for", "v", "in", "meta", "]", "metadata", "[", "dd", ".", "get_sample_name", "(", "sample", ")", "]", "=", "meta", "metadata_fn", "=", "counts", "+", "\".metadata\"", "if", "file_exists", "(", "metadata_fn", ")", ":", "return", "samples", "with", "file_transaction", "(", "metadata_fn", ")", "as", "tx_metadata_fn", ":", "with", "open", "(", "tx_metadata_fn", ",", "'w'", ")", "as", "outh", ":", "outh", ".", "write", "(", "\",\"", ".", "join", "(", "[", "\"sample\"", "]", "+", "meta_cols", ")", "+", "'\\n'", ")", "with", "open", "(", "counts", "+", "\".colnames\"", ")", "as", "inh", ":", 
"for", "line", "in", "inh", ":", "sample", "=", "line", ".", "split", "(", "\":\"", ")", "[", "0", "]", "if", "has_sample_barcodes", ":", "barcode", "=", "sample", ".", "split", "(", "\"-\"", ")", "[", "1", "]", "else", ":", "barcode", "=", "\"NaN\"", "outh", ".", "write", "(", "\",\"", ".", "join", "(", "barcodes", "[", "(", "sample", ",", "barcode", ")", "]", "+", "metadata", "[", "sample", "]", ")", "+", "'\\n'", ")", "return", "samples" ]
Retrieves information about the user currently using the api .
def get_user(self):
    """Return information about the user associated with the current API session."""
    url = self.API_BASE_URL + '/user'
    raw_response = requests_util.run_request('get', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
6,902
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L274-L282
[ "def", "get_modifications", "(", "self", ")", ":", "# Get all the specific mod types", "mod_event_types", "=", "list", "(", "ont_to_mod_type", ".", "keys", "(", ")", ")", "# Add ONT::PTMs as a special case", "mod_event_types", "+=", "[", "'ONT::PTM'", "]", "mod_events", "=", "[", "]", "for", "mod_event_type", "in", "mod_event_types", ":", "events", "=", "self", ".", "tree", ".", "findall", "(", "\"EVENT/[type='%s']\"", "%", "mod_event_type", ")", "mod_extracted", "=", "self", ".", "extracted_events", ".", "get", "(", "mod_event_type", ",", "[", "]", ")", "for", "event", "in", "events", ":", "event_id", "=", "event", ".", "attrib", ".", "get", "(", "'id'", ")", "if", "event_id", "not", "in", "mod_extracted", ":", "mod_events", ".", "append", "(", "event", ")", "# Iterate over all modification events", "for", "event", "in", "mod_events", ":", "stmts", "=", "self", ".", "_get_modification_event", "(", "event", ")", "if", "stmts", ":", "for", "stmt", "in", "stmts", ":", "self", ".", "statements", ".", "append", "(", "stmt", ")" ]
Retrieves the list of tv series the current user has flagged as favorite .
def get_user_favorites(self):
    """Return the TV series the current user has flagged as favorites."""
    url = self.API_BASE_URL + '/user/favorites'
    raw_response = requests_util.run_request('get', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
6,903
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L285-L293
[ "def", "_remove_monitor", "(", "monitors", ",", "handle", ",", "devices", ",", "events", ")", ":", "empty_devices", "=", "[", "]", "for", "conn_string", "in", "devices", ":", "data", "=", "monitors", ".", "get", "(", "conn_string", ")", "if", "data", "is", "None", ":", "continue", "for", "event", "in", "events", ":", "event_dict", "=", "data", ".", "get", "(", "event", ")", "if", "event_dict", "is", "None", ":", "continue", "if", "handle", "in", "event_dict", ":", "del", "event_dict", "[", "handle", "]", "if", "len", "(", "event_dict", ")", "==", "0", ":", "del", "data", "[", "event", "]", "if", "len", "(", "data", ")", "==", "0", ":", "empty_devices", ".", "append", "(", "conn_string", ")", "return", "empty_devices" ]
Deletes the series of the provided id from the favorites list of the current user .
def delete_user_favorite(self, series_id):
    """Remove the series with the given id from the current user's favorites.

    :param series_id: TheTVDB id of the series to remove.
    :return: the parsed API response.
    """
    url = self.API_BASE_URL + '/user/favorites/%d' % series_id
    raw_response = requests_util.run_request('delete', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
6,904
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L296-L306
[ "def", "create_binding", "(", "self", ",", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", "=", "None", ",", "args", "=", "None", ")", ":", "vhost", "=", "quote", "(", "vhost", ",", "''", ")", "exchange", "=", "quote", "(", "exchange", ",", "''", ")", "queue", "=", "quote", "(", "queue", ",", "''", ")", "body", "=", "json", ".", "dumps", "(", "{", "'routing_key'", ":", "rt_key", ",", "'arguments'", ":", "args", "or", "[", "]", "}", ")", "path", "=", "Client", ".", "urls", "[", "'bindings_between_exch_queue'", "]", "%", "(", "vhost", ",", "exchange", ",", "queue", ")", "binding", "=", "self", ".", "_call", "(", "path", ",", "'POST'", ",", "body", "=", "body", ",", "headers", "=", "Client", ".", "json_headers", ")", "return", "binding" ]
Returns a list of the ratings provided by the current user .
def __get_user_ratings(self):
    """Return every rating the current user has provided."""
    url = self.API_BASE_URL + '/user/ratings'
    raw_response = requests_util.run_request('get', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(raw_response)
6,905
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L322-L330
[ "def", "parse_message", "(", "message", ",", "nodata", "=", "False", ")", ":", "header", "=", "read_machine_header", "(", "message", ")", "h_len", "=", "__get_machine_header_length", "(", "header", ")", "meta_raw", "=", "message", "[", "h_len", ":", "h_len", "+", "header", "[", "'meta_len'", "]", "]", "meta", "=", "__parse_meta", "(", "meta_raw", ",", "header", ")", "data_start", "=", "h_len", "+", "header", "[", "'meta_len'", "]", "data", "=", "b''", "if", "not", "nodata", ":", "data", "=", "__decompress", "(", "meta", ",", "message", "[", "data_start", ":", "data_start", "+", "header", "[", "'data_len'", "]", "]", ")", "return", "header", ",", "meta", ",", "data" ]
Returns a list of the ratings for the type of item provided for the current user .
def get_user_ratings(self, item_type=None):
    """Return the current user's ratings, optionally filtered by item type.

    :param item_type: optional rating type filter (e.g. series/episode);
        when omitted, all ratings are returned.
    :return: the parsed API response.
    """
    if item_type:
        query_string = 'itemType=%s' % item_type
        # BUG FIX: the endpoint was misspelled '/user/ratings/qeury' —
        # TheTVDB APIv2 exposes '/user/ratings/query'.
        return self.parse_raw_response(
            requests_util.run_request('get',
                                      self.API_BASE_URL + '/user/ratings/query?%s' % query_string,
                                      headers=self.__get_header_with_auth()))
    else:
        return self.__get_user_ratings()
6,906
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L333-L348
[ "def", "parse_message", "(", "message", ",", "nodata", "=", "False", ")", ":", "header", "=", "read_machine_header", "(", "message", ")", "h_len", "=", "__get_machine_header_length", "(", "header", ")", "meta_raw", "=", "message", "[", "h_len", ":", "h_len", "+", "header", "[", "'meta_len'", "]", "]", "meta", "=", "__parse_meta", "(", "meta_raw", ",", "header", ")", "data_start", "=", "h_len", "+", "header", "[", "'meta_len'", "]", "data", "=", "b''", "if", "not", "nodata", ":", "data", "=", "__decompress", "(", "meta", ",", "message", "[", "data_start", ":", "data_start", "+", "header", "[", "'data_len'", "]", "]", ")", "return", "header", ",", "meta", ",", "data" ]
Adds the rating for the item indicated for the current user .
def add_user_rating(self, item_type, item_id, item_rating):
    """Record a rating for the given item on behalf of the current user.

    :param item_type: the type of the rated item.
    :param item_id: numeric id of the rated item.
    :param item_rating: numeric rating to store.
    :return: the parsed API response.
    """
    url = self.API_BASE_URL + '/user/ratings/%s/%d/%d' % (item_type, item_id, item_rating)
    response = requests_util.run_request('put', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(response)
6,907
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L351-L366
[ "def", "attach", "(", "self", ",", "stdout", "=", "True", ",", "stderr", "=", "True", ",", "stream", "=", "True", ",", "logs", "=", "False", ")", ":", "try", ":", "data", "=", "parse_stream", "(", "self", ".", "client", ".", "attach", "(", "self", ".", "id", ",", "stdout", ",", "stderr", ",", "stream", ",", "logs", ")", ")", "except", "KeyboardInterrupt", ":", "logger", ".", "warning", "(", "\"service container: {0} has been interrupted. \"", "\"The container will be stopped but will not be deleted.\"", ".", "format", "(", "self", ".", "name", ")", ")", "data", "=", "None", "self", ".", "stop", "(", ")", "return", "data" ]
Deletes from the list of rating of the current user the rating provided for the specified element type .
def delete_user_rating(self, item_type, item_id):
    """Delete the current user's rating for the specified item.

    :param item_type: the type of the rated item.
    :param item_id: numeric id of the rated item.
    :return: the parsed API response.
    """
    url = self.API_BASE_URL + '/user/ratings/%s/%d' % (item_type, item_id)
    response = requests_util.run_request('delete', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(response)
6,908
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L369-L382
[ "def", "parse_message", "(", "message", ",", "nodata", "=", "False", ")", ":", "header", "=", "read_machine_header", "(", "message", ")", "h_len", "=", "__get_machine_header_length", "(", "header", ")", "meta_raw", "=", "message", "[", "h_len", ":", "h_len", "+", "header", "[", "'meta_len'", "]", "]", "meta", "=", "__parse_meta", "(", "meta_raw", ",", "header", ")", "data_start", "=", "h_len", "+", "header", "[", "'meta_len'", "]", "data", "=", "b''", "if", "not", "nodata", ":", "data", "=", "__decompress", "(", "meta", ",", "message", "[", "data_start", ":", "data_start", "+", "header", "[", "'data_len'", "]", "]", ")", "return", "header", ",", "meta", ",", "data" ]
Returns the full information of the episode belonging to the Id provided .
def get_episode(self, episode_id):
    """Return the full information of the episode with the given id.

    :param episode_id: TheTVDB id of the episode.
    :return: the parsed API response.
    """
    url = self.API_BASE_URL + '/episodes/%d' % episode_id
    response = requests_util.run_request('get', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(response)
6,909
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L385-L396
[ "def", "_convert_strls", "(", "self", ",", "data", ")", ":", "convert_cols", "=", "[", "col", "for", "i", ",", "col", "in", "enumerate", "(", "data", ")", "if", "self", ".", "typlist", "[", "i", "]", "==", "32768", "or", "col", "in", "self", ".", "_convert_strl", "]", "if", "convert_cols", ":", "ssw", "=", "StataStrLWriter", "(", "data", ",", "convert_cols", ")", "tab", ",", "new_data", "=", "ssw", ".", "generate_table", "(", ")", "data", "=", "new_data", "self", ".", "_strl_blob", "=", "ssw", ".", "generate_blob", "(", "tab", ")", "return", "data" ]
Returns a list of all language options available in TheTVDB .
def get_languages(self):
    """Return all language options available on TheTVDB."""
    url = self.API_BASE_URL + '/languages'
    response = requests_util.run_request('get', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(response)
6,910
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L399-L409
[ "def", "wnincd", "(", "left", ",", "right", ",", "window", ")", ":", "assert", "isinstance", "(", "window", ",", "stypes", ".", "SpiceCell", ")", "assert", "window", ".", "dtype", "==", "1", "left", "=", "ctypes", ".", "c_double", "(", "left", ")", "right", "=", "ctypes", ".", "c_double", "(", "right", ")", "return", "bool", "(", "libspice", ".", "wnincd_c", "(", "left", ",", "right", ",", "ctypes", ".", "byref", "(", "window", ")", ")", ")" ]
Retrieves information about the language of the given id .
def get_language(self, language_id):
    """Return information about the language with the given id.

    :param language_id: TheTVDB id of the language.
    :return: the parsed API response.
    """
    url = self.API_BASE_URL + '/languages/%d' % language_id
    response = requests_util.run_request('get', url, headers=self.__get_header_with_auth())
    return self.parse_raw_response(response)
6,911
https://github.com/thilux/tvdb_client/blob/2d5106f260367c0abe1284683697874df6343f78/tvdb_client/clients/ApiV2Client.py#L412-L423
[ "def", "proxy", "(", "ctx", ",", "bind", ",", "port", ")", ":", "app", "=", "web", ".", "Application", "(", ")", "app", ".", "on_startup", ".", "append", "(", "startup_proxy", ")", "app", ".", "on_cleanup", ".", "append", "(", "cleanup_proxy", ")", "app", ".", "router", ".", "add_route", "(", "\"GET\"", ",", "r'/stream/{path:.*$}'", ",", "websocket_handler", ")", "app", ".", "router", ".", "add_route", "(", "\"GET\"", ",", "r'/wsproxy/{path:.*$}'", ",", "websocket_handler", ")", "app", ".", "router", ".", "add_route", "(", "'*'", ",", "r'/{path:.*$}'", ",", "web_handler", ")", "if", "getattr", "(", "ctx", ".", "args", ",", "'testing'", ",", "False", ")", ":", "return", "app", "web", ".", "run_app", "(", "app", ",", "host", "=", "bind", ",", "port", "=", "port", ")" ]
Establish API key and password associated with APIv1 commands .
def SetCredentials(api_key, api_passwd):
    """Store the API key and password used by APIv1 commands and enable v1.

    :param api_key: the APIv1 key.
    :param api_passwd: the APIv1 password.
    """
    global V1_API_KEY, V1_API_PASSWD, _V1_ENABLED
    V1_API_KEY = api_key
    V1_API_PASSWD = api_passwd
    _V1_ENABLED = True
6,912
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/__init__.py#L54-L61
[ "def", "future_set_exception_unless_cancelled", "(", "future", ":", "\"Union[futures.Future[_T], Future[_T]]\"", ",", "exc", ":", "BaseException", ")", "->", "None", ":", "if", "not", "future", ".", "cancelled", "(", ")", ":", "future", ".", "set_exception", "(", "exc", ")", "else", ":", "app_log", ".", "error", "(", "\"Exception after Future was cancelled\"", ",", "exc_info", "=", "exc", ")" ]
Normalize the input array so that it sums to 1 .
def normalize(A, axis=None, inplace=False):
    """Normalize *A* so it sums to 1, either globally or along *axis*.

    A machine epsilon is added first so an all-zero input does not yield NaNs.

    :param A: numpy float array to normalize.
    :param axis: axis along which to normalize; ``None`` normalizes the
        whole array.
    :param inplace: when False (default), operate on a copy and leave *A*
        untouched.
    :return: the normalized array.
    """
    if not inplace:
        A = A.copy()
    A += np.finfo(float).eps
    Asum = A.sum(axis)
    # BUG FIX: the original tested ``if axis`` which is False for axis=0,
    # skipping the zero-sum guard and keep-dims reshape for that axis.
    if axis is not None and A.ndim > 1:
        # Make sure we don't divide by zero.
        Asum[Asum == 0] = 1
        shape = list(A.shape)
        shape[axis] = 1
        Asum.shape = shape
    A /= Asum
    return A
6,913
https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L40-L68
[ "def", "_create_download_failed_message", "(", "exception", ",", "url", ")", ":", "message", "=", "'Failed to download from:\\n{}\\nwith {}:\\n{}'", ".", "format", "(", "url", ",", "exception", ".", "__class__", ".", "__name__", ",", "exception", ")", "if", "_is_temporal_problem", "(", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "requests", ".", "ConnectionError", ")", ":", "message", "+=", "'\\nPlease check your internet connection and try again.'", "else", ":", "message", "+=", "'\\nThere might be a problem in connection or the server failed to process '", "'your request. Please try again.'", "elif", "isinstance", "(", "exception", ",", "requests", ".", "HTTPError", ")", ":", "try", ":", "server_message", "=", "''", "for", "elem", "in", "decode_data", "(", "exception", ".", "response", ".", "content", ",", "MimeType", ".", "XML", ")", ":", "if", "'ServiceException'", "in", "elem", ".", "tag", "or", "'Message'", "in", "elem", ".", "tag", ":", "server_message", "+=", "elem", ".", "text", ".", "strip", "(", "'\\n\\t '", ")", "except", "ElementTree", ".", "ParseError", ":", "server_message", "=", "exception", ".", "response", ".", "text", "message", "+=", "'\\nServer response: \"{}\"'", ".", "format", "(", "server_message", ")", "return", "message" ]
Convert a p - state transition matrix and h - state matrices to the full transition matrix
def ph2full(ptrans, htrans):
    """Expand a p-state transition matrix plus h-state matrices into the
    full (n_pstates * n_hstates) transition matrix.

    :param ptrans: (n_pstates, n_pstates) p-state transition matrix.
    :param htrans: per p-state-pair h-state transition matrices, indexable
        as ``htrans[pidx, :, hidx]``.
    :return: the full transition matrix of shape (N, N) with
        N = n_pstates * n_hstates.
    """
    n_p = len(ptrans)
    n_h = len(htrans[0, 0])
    full = np.zeros((n_p * n_h, n_p * n_h))
    for p in range(n_p):
        for h in range(n_h):
            # Each row of the full matrix is the outer combination of the
            # p-state row with the matching h-state column, flattened.
            full[p * n_h + h] = (ptrans[p, :, np.newaxis] * htrans[p, :, h]).flatten()
    return full
6,914
https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L71-L85
[ "def", "to_vobjects", "(", "self", ",", "filename", ",", "uids", "=", "None", ")", ":", "self", ".", "_update", "(", ")", "if", "not", "uids", ":", "uids", "=", "self", ".", "_reminders", "[", "filename", "]", "items", "=", "[", "]", "for", "uid", "in", "uids", ":", "cal", "=", "iCalendar", "(", ")", "self", ".", "_gen_vevent", "(", "self", ".", "_reminders", "[", "filename", "]", "[", "uid", "]", ",", "cal", ".", "add", "(", "'vevent'", ")", ")", "etag", "=", "md5", "(", ")", "etag", ".", "update", "(", "self", ".", "_reminders", "[", "filename", "]", "[", "uid", "]", "[", "'line'", "]", ".", "encode", "(", "\"utf-8\"", ")", ")", "items", ".", "append", "(", "(", "uid", ",", "cal", ",", "'\"%s\"'", "%", "etag", ".", "hexdigest", "(", ")", ")", ")", "return", "items" ]
Convert a full transmat to the respective p - state and h - state transmats
def full2ph(trans, n_pstates):
    """Split a full transition matrix into p-state and h-state transition
    matrices (the inverse of ``ph2full``).

    :param trans: full (N, N) transition matrix with N a multiple of
        ``n_pstates``.
    :param n_pstates: number of p-states.
    :return: ``(ptrans, htrans)`` — the normalized p-state matrix and the
        normalized per-pair h-state matrices.
    """
    # BUG FIX: use integer division — plain ``/`` yields a float under
    # Python 3, which breaks np.zeros() and the slice arithmetic below.
    n_hstates = len(trans) // n_pstates
    htrans = np.zeros((n_pstates, n_pstates, n_hstates, n_hstates))
    for pidx1, pidx2 in product(range(n_pstates), range(n_pstates)):
        idx1 = pidx1 * n_hstates
        idx2 = pidx2 * n_hstates
        htrans[pidx1, pidx2] = trans[idx1:idx1 + n_hstates, idx2:idx2 + n_hstates]
    ptrans = normalize(htrans.sum(axis=-1).sum(axis=-1), axis=1)
    htrans = normalize(htrans, axis=3)
    return ptrans, htrans
6,915
https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L88-L103
[ "def", "list_blobs", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "# pylint: disable=unused-argument", "if", "kwargs", "is", "None", ":", "kwargs", "=", "{", "}", "if", "'container'", "not", "in", "kwargs", ":", "raise", "SaltCloudSystemExit", "(", "'A container must be specified'", ")", "storageservice", "=", "_get_block_blob_service", "(", "kwargs", ")", "ret", "=", "{", "}", "try", ":", "for", "blob", "in", "storageservice", ".", "list_blobs", "(", "kwargs", "[", "'container'", "]", ")", ".", "items", ":", "ret", "[", "blob", ".", "name", "]", "=", "{", "'blob_type'", ":", "blob", ".", "properties", ".", "blob_type", ",", "'last_modified'", ":", "blob", ".", "properties", ".", "last_modified", ".", "isoformat", "(", ")", ",", "'server_encrypted'", ":", "blob", ".", "properties", ".", "server_encrypted", ",", "}", "except", "Exception", "as", "exc", ":", "log", ".", "warning", "(", "six", ".", "text_type", "(", "exc", ")", ")", "return", "ret" ]
Generate a uniformly - random stochastic array or matrix
def gen_stochastic_matrix(size, random_state=None):
    """Generate a uniformly-random stochastic array or matrix.

    :param size: an int (length of a stochastic vector) or a
        ``(rows, cols)`` tuple.
    :param random_state: a ``numpy.random.RandomState``; defaults to the
        global numpy RNG.
    :return: array whose rows each sum to 1, squeezed to 1-D when possible.
    """
    # BUG FIX: the original dereferenced random_state unconditionally and
    # raised AttributeError when the default None was used.
    if random_state is None:
        random_state = np.random
    if not isinstance(size, tuple):
        size = (1, size)
    assert len(size) == 2
    # Draw (cols - 1) cut points in [0, 1); with fixed 0 and 1 endpoints,
    # the consecutive differences of the sorted points form a stochastic row.
    n = random_state.uniform(size=(size[0], size[1] - 1))
    n = np.concatenate([np.zeros((size[0], 1)), n, np.ones((size[0], 1))], axis=1)
    A = np.diff(np.sort(n))
    return A.squeeze()
6,916
https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L106-L119
[ "def", "OnMainToolbarToggle", "(", "self", ",", "event", ")", ":", "self", ".", "main_window", ".", "main_toolbar", ".", "SetGripperVisible", "(", "True", ")", "main_toolbar_info", "=", "self", ".", "main_window", ".", "_mgr", ".", "GetPane", "(", "\"main_window_toolbar\"", ")", "self", ".", "_toggle_pane", "(", "main_toolbar_info", ")", "event", ".", "Skip", "(", ")" ]
Empirically determine the steady state probabilities from a stochastic matrix
def steadystate(A, max_iter=100):
    """Empirically estimate the steady-state distribution of a stochastic
    matrix by raising it to a high power and averaging its distinct rows.

    :param A: square stochastic matrix.
    :param max_iter: power to raise A to (defaults to 100).
    :return: the normalized steady-state probability vector.
    """
    P = np.linalg.matrix_power(A, max_iter)
    # Collect the rows of P that are distinct up to floating-point tolerance.
    unique_rows = []
    for row in P:
        if not any(np.allclose(row, seen) for seen in unique_rows):
            unique_rows.append(row)
    return normalize(np.sum(unique_rows, axis=0))
6,917
https://github.com/vmonaco/pohmm/blob/c00f8a62d3005a171d424549a55d46c421859ae9/pohmm/utils.py#L122-L134
[ "def", "up", "(", "job", ",", "input_file_id_1", ",", "input_file_id_2", ")", ":", "with", "job", ".", "fileStore", ".", "writeGlobalFileStream", "(", ")", "as", "(", "fileHandle", ",", "output_id", ")", ":", "with", "job", ".", "fileStore", ".", "readGlobalFileStream", "(", "input_file_id_1", ")", "as", "inputFileHandle1", ":", "with", "job", ".", "fileStore", ".", "readGlobalFileStream", "(", "input_file_id_2", ")", "as", "inputFileHandle2", ":", "job", ".", "fileStore", ".", "logToMaster", "(", "\"Merging %s and %s to %s\"", "%", "(", "input_file_id_1", ",", "input_file_id_2", ",", "output_id", ")", ")", "merge", "(", "inputFileHandle1", ",", "inputFileHandle2", ",", "fileHandle", ")", "# Cleanup up the input files - these deletes will occur after the completion is successful.", "job", ".", "fileStore", ".", "deleteGlobalFile", "(", "input_file_id_1", ")", "job", ".", "fileStore", ".", "deleteGlobalFile", "(", "input_file_id_2", ")", "return", "output_id" ]
create an inter - process communication pipe
def pipe():
    """Create an inter-process communication pipe.

    :return: a ``(reader, writer)`` pair of File objects wrapping the
        underlying OS pipe descriptors.
    """
    read_fd, write_fd = os.pipe()
    reader = File.fromfd(read_fd, 'rb')
    writer = File.fromfd(write_fd, 'wb')
    return reader, writer
6,918
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/ipc.py#L11-L19
[ "def", "to_lal_unit", "(", "aunit", ")", ":", "if", "isinstance", "(", "aunit", ",", "string_types", ")", ":", "aunit", "=", "units", ".", "Unit", "(", "aunit", ")", "aunit", "=", "aunit", ".", "decompose", "(", ")", "lunit", "=", "lal", ".", "Unit", "(", ")", "for", "base", ",", "power", "in", "zip", "(", "aunit", ".", "bases", ",", "aunit", ".", "powers", ")", ":", "# try this base", "try", ":", "lalbase", "=", "LAL_UNIT_FROM_ASTROPY", "[", "base", "]", "except", "KeyError", ":", "lalbase", "=", "None", "# otherwise loop through the equivalent bases", "for", "eqbase", "in", "base", ".", "find_equivalent_units", "(", ")", ":", "try", ":", "lalbase", "=", "LAL_UNIT_FROM_ASTROPY", "[", "eqbase", "]", "except", "KeyError", ":", "continue", "# if we didn't find anything, raise an exception", "if", "lalbase", "is", "None", ":", "raise", "ValueError", "(", "\"LAL has no unit corresponding to %r\"", "%", "base", ")", "lunit", "*=", "lalbase", "**", "power", "return", "lunit" ]
Returns the id of the target that is set as head
def get_id_head(self):
    """Return the id of the first target marked as head, or None if no
    target is a head."""
    for candidate in self:
        if candidate.is_head():
            return candidate.get_id()
    return None
6,919
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/span_data.py#L99-L110
[ "def", "load_toml_rest_api_config", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "LOGGER", ".", "info", "(", "\"Skipping rest api loading from non-existent config file: %s\"", ",", "filename", ")", "return", "RestApiConfig", "(", ")", "LOGGER", ".", "info", "(", "\"Loading rest api information from config: %s\"", ",", "filename", ")", "try", ":", "with", "open", "(", "filename", ")", "as", "fd", ":", "raw_config", "=", "fd", ".", "read", "(", ")", "except", "IOError", "as", "e", ":", "raise", "RestApiConfigurationError", "(", "\"Unable to load rest api configuration file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "toml_config", "=", "toml", ".", "loads", "(", "raw_config", ")", "invalid_keys", "=", "set", "(", "toml_config", ".", "keys", "(", ")", ")", ".", "difference", "(", "[", "'bind'", ",", "'connect'", ",", "'timeout'", ",", "'opentsdb_db'", ",", "'opentsdb_url'", ",", "'opentsdb_username'", ",", "'opentsdb_password'", ",", "'client_max_size'", "]", ")", "if", "invalid_keys", ":", "raise", "RestApiConfigurationError", "(", "\"Invalid keys in rest api config: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted", "(", "list", "(", "invalid_keys", ")", ")", ")", ")", ")", "config", "=", "RestApiConfig", "(", "bind", "=", "toml_config", ".", "get", "(", "\"bind\"", ",", "None", ")", ",", "connect", "=", "toml_config", ".", "get", "(", "'connect'", ",", "None", ")", ",", "timeout", "=", "toml_config", ".", "get", "(", "'timeout'", ",", "None", ")", ",", "opentsdb_url", "=", "toml_config", ".", "get", "(", "'opentsdb_url'", ",", "None", ")", ",", "opentsdb_db", "=", "toml_config", ".", "get", "(", "'opentsdb_db'", ",", "None", ")", ",", "opentsdb_username", "=", "toml_config", ".", "get", "(", "'opentsdb_username'", ",", "None", ")", ",", "opentsdb_password", "=", "toml_config", ".", "get", "(", "'opentsdb_password'", ",", "None", ")", ",", "client_max_size", "=", "toml_config", 
".", "get", "(", "'client_max_size'", ",", "None", ")", ")", "return", "config" ]
Adds a new target to the span with the specified id
def add_target_id(self, this_id):
    """Append a new target with the given id to this span.

    :param this_id: the id to assign to the new target.
    """
    target = Ctarget()
    target.set_id(this_id)
    self.node.append(target.get_node())
6,920
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/span_data.py#L112-L120
[ "def", "load_toml_rest_api_config", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "LOGGER", ".", "info", "(", "\"Skipping rest api loading from non-existent config file: %s\"", ",", "filename", ")", "return", "RestApiConfig", "(", ")", "LOGGER", ".", "info", "(", "\"Loading rest api information from config: %s\"", ",", "filename", ")", "try", ":", "with", "open", "(", "filename", ")", "as", "fd", ":", "raw_config", "=", "fd", ".", "read", "(", ")", "except", "IOError", "as", "e", ":", "raise", "RestApiConfigurationError", "(", "\"Unable to load rest api configuration file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "toml_config", "=", "toml", ".", "loads", "(", "raw_config", ")", "invalid_keys", "=", "set", "(", "toml_config", ".", "keys", "(", ")", ")", ".", "difference", "(", "[", "'bind'", ",", "'connect'", ",", "'timeout'", ",", "'opentsdb_db'", ",", "'opentsdb_url'", ",", "'opentsdb_username'", ",", "'opentsdb_password'", ",", "'client_max_size'", "]", ")", "if", "invalid_keys", ":", "raise", "RestApiConfigurationError", "(", "\"Invalid keys in rest api config: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted", "(", "list", "(", "invalid_keys", ")", ")", ")", ")", ")", "config", "=", "RestApiConfig", "(", "bind", "=", "toml_config", ".", "get", "(", "\"bind\"", ",", "None", ")", ",", "connect", "=", "toml_config", ".", "get", "(", "'connect'", ",", "None", ")", ",", "timeout", "=", "toml_config", ".", "get", "(", "'timeout'", ",", "None", ")", ",", "opentsdb_url", "=", "toml_config", ".", "get", "(", "'opentsdb_url'", ",", "None", ")", ",", "opentsdb_db", "=", "toml_config", ".", "get", "(", "'opentsdb_db'", ",", "None", ")", ",", "opentsdb_username", "=", "toml_config", ".", "get", "(", "'opentsdb_username'", ",", "None", ")", ",", "opentsdb_password", "=", "toml_config", ".", "get", "(", "'opentsdb_password'", ",", "None", ")", ",", "client_max_size", "=", "toml_config", 
".", "get", "(", "'client_max_size'", ",", "None", ")", ")", "return", "config" ]
Adds new targets to the span with the specified ids
def create_from_ids(self, list_ids):
    """Append one new target per id in *list_ids* to this span.

    :param list_ids: iterable of target ids to add.
    """
    for ident in list_ids:
        target = Ctarget()
        target.set_id(ident)
        self.node.append(target.get_node())
6,921
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/span_data.py#L122-L131
[ "def", "load_toml_rest_api_config", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "LOGGER", ".", "info", "(", "\"Skipping rest api loading from non-existent config file: %s\"", ",", "filename", ")", "return", "RestApiConfig", "(", ")", "LOGGER", ".", "info", "(", "\"Loading rest api information from config: %s\"", ",", "filename", ")", "try", ":", "with", "open", "(", "filename", ")", "as", "fd", ":", "raw_config", "=", "fd", ".", "read", "(", ")", "except", "IOError", "as", "e", ":", "raise", "RestApiConfigurationError", "(", "\"Unable to load rest api configuration file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "toml_config", "=", "toml", ".", "loads", "(", "raw_config", ")", "invalid_keys", "=", "set", "(", "toml_config", ".", "keys", "(", ")", ")", ".", "difference", "(", "[", "'bind'", ",", "'connect'", ",", "'timeout'", ",", "'opentsdb_db'", ",", "'opentsdb_url'", ",", "'opentsdb_username'", ",", "'opentsdb_password'", ",", "'client_max_size'", "]", ")", "if", "invalid_keys", ":", "raise", "RestApiConfigurationError", "(", "\"Invalid keys in rest api config: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted", "(", "list", "(", "invalid_keys", ")", ")", ")", ")", ")", "config", "=", "RestApiConfig", "(", "bind", "=", "toml_config", ".", "get", "(", "\"bind\"", ",", "None", ")", ",", "connect", "=", "toml_config", ".", "get", "(", "'connect'", ",", "None", ")", ",", "timeout", "=", "toml_config", ".", "get", "(", "'timeout'", ",", "None", ")", ",", "opentsdb_url", "=", "toml_config", ".", "get", "(", "'opentsdb_url'", ",", "None", ")", ",", "opentsdb_db", "=", "toml_config", ".", "get", "(", "'opentsdb_db'", ",", "None", ")", ",", "opentsdb_username", "=", "toml_config", ".", "get", "(", "'opentsdb_username'", ",", "None", ")", ",", "opentsdb_password", "=", "toml_config", ".", "get", "(", "'opentsdb_password'", ",", "None", ")", ",", "client_max_size", "=", "toml_config", 
".", "get", "(", "'client_max_size'", ",", "None", ")", ")", "return", "config" ]
Adds new targets to the span that are defined in a list
def create_from_targets ( self , list_targs ) : for this_target in list_targs : self . node . append ( this_target . get_node ( ) )
6,922
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/span_data.py#L133-L140
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Returns the statement object for the supplied identifier
def get_statement ( self , statement_id ) : if statement_id in self . idx : return Cstatement ( self . idx [ statement_id ] , self . type ) else : return None
6,923
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/attribution_data.py#L279-L288
[ "def", "_fill_gaps", "(", "tr", ")", ":", "tr", "=", "tr", ".", "split", "(", ")", "gaps", "=", "tr", ".", "get_gaps", "(", ")", "tr", "=", "tr", ".", "detrend", "(", ")", ".", "merge", "(", "fill_value", "=", "0", ")", "[", "0", "]", "gaps", "=", "[", "{", "'starttime'", ":", "gap", "[", "4", "]", ",", "'endtime'", ":", "gap", "[", "5", "]", "}", "for", "gap", "in", "gaps", "]", "return", "gaps", ",", "tr" ]
Adds a statement object to the layer
def add_statement ( self , statement_obj ) : if statement_obj . get_id ( ) in self . idx : raise ValueError ( "Statement with id {} already exists!" . format ( statement_obj . get_id ( ) ) ) self . node . append ( statement_obj . get_node ( ) ) self . idx [ statement_obj . get_id ( ) ] = statement_obj
6,924
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/attribution_data.py#L290-L300
[ "def", "find_apikey", "(", ")", ":", "env_keys", "=", "[", "'TINYPNG_APIKEY'", ",", "'TINYPNG_API_KEY'", "]", "paths", "=", "[", "]", "paths", ".", "append", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "abspath", "(", "\".\"", ")", ",", "\"tinypng.key\"", ")", ")", "# local directory", "paths", ".", "append", "(", "os", ".", "path", ".", "expanduser", "(", "\"~/.tinypng.key\"", ")", ")", "# home directory", "for", "env_key", "in", "env_keys", ":", "if", "os", ".", "environ", ".", "get", "(", "env_key", ")", ":", "return", "os", ".", "environ", ".", "get", "(", "env_key", ")", "for", "path", "in", "paths", ":", "if", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "return", "open", "(", "path", ",", "'rt'", ")", ".", "read", "(", ")", ".", "strip", "(", ")", "return", "None" ]
Returns group object for datacenter root group .
def RootGroup ( self ) : return ( clc . v2 . Group ( id = self . root_group_id , alias = self . alias , session = self . session ) )
6,925
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/datacenter.py#L81-L91
[ "def", "solve_dual_entropic", "(", "a", ",", "b", ",", "M", ",", "reg", ",", "batch_size", ",", "numItermax", "=", "10000", ",", "lr", "=", "1", ",", "log", "=", "False", ")", ":", "opt_alpha", ",", "opt_beta", "=", "sgd_entropic_regularization", "(", "a", ",", "b", ",", "M", ",", "reg", ",", "batch_size", ",", "numItermax", ",", "lr", ")", "pi", "=", "(", "np", ".", "exp", "(", "(", "opt_alpha", "[", ":", ",", "None", "]", "+", "opt_beta", "[", "None", ",", ":", "]", "-", "M", "[", ":", ",", ":", "]", ")", "/", "reg", ")", "*", "a", "[", ":", ",", "None", "]", "*", "b", "[", "None", ",", ":", "]", ")", "if", "log", ":", "log", "=", "{", "}", "log", "[", "'alpha'", "]", "=", "opt_alpha", "log", "[", "'beta'", "]", "=", "opt_beta", "return", "pi", ",", "log", "else", ":", "return", "pi" ]
constructs the LL context
def LL ( n ) : if ( n <= 0 ) : return Context ( '0' ) else : LL1 = LL ( n - 1 ) r1 = C1 ( 3 ** ( n - 1 ) , 2 ** ( n - 1 ) ) - LL1 - LL1 r2 = LL1 - LL1 - LL1 return r1 + r2
6,926
https://github.com/pyfca/pyfca/blob/cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0/pyfca/implications.py#L532-L539
[ "def", "download", "(", "self", ",", "directory", "=", "'~/Music'", ",", "song_name", "=", "'%a - %s - %A'", ")", ":", "formatted", "=", "self", ".", "format", "(", "song_name", ")", "path", "=", "os", ".", "path", ".", "expanduser", "(", "directory", ")", "+", "os", ".", "path", ".", "sep", "+", "formatted", "+", "'.mp3'", "try", ":", "raw", "=", "self", ".", "safe_download", "(", ")", "with", "open", "(", "path", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "raw", ")", "except", ":", "raise", "return", "formatted" ]
constructs the HH context
def HH ( n ) : if ( n <= 0 ) : return Context ( '1' ) else : LL1 = LL ( n - 1 ) HH1 = HH ( n - 1 ) r1 = C1 ( 3 ** ( n - 1 ) , 2 ** ( n - 1 ) ) - LL1 - HH1 r2 = HH1 - HH1 - HH1 return r1 + r2
6,927
https://github.com/pyfca/pyfca/blob/cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0/pyfca/implications.py#L540-L548
[ "def", "get_contract", "(", "firma", ",", "pravni_forma", ",", "sidlo", ",", "ic", ",", "dic", ",", "zastoupen", ")", ":", "contract_fn", "=", "_resource_context", "(", "\"Licencni_smlouva_o_dodavani_elektronickych_publikaci\"", "\"_a_jejich_uziti.rst\"", ")", "# load contract", "with", "open", "(", "contract_fn", ")", "as", "f", ":", "contract", "=", "f", ".", "read", "(", ")", "#.decode(\"utf-8\").encode(\"utf-8\")", "# make sure that `firma` has its heading mark", "firma", "=", "firma", ".", "strip", "(", ")", "firma", "=", "firma", "+", "\"\\n\"", "+", "(", "(", "len", "(", "firma", ")", "+", "1", ")", "*", "\"-\"", ")", "# patch template", "contract", "=", "Template", "(", "contract", ")", ".", "substitute", "(", "firma", "=", "firma", ",", "pravni_forma", "=", "pravni_forma", ".", "strip", "(", ")", ",", "sidlo", "=", "sidlo", ".", "strip", "(", ")", ",", "ic", "=", "ic", ".", "strip", "(", ")", ",", "dic", "=", "dic", ".", "strip", "(", ")", ",", "zastoupen", "=", "zastoupen", ".", "strip", "(", ")", ",", "resources_path", "=", "RES_PATH", ")", "return", "gen_pdf", "(", "contract", ",", "open", "(", "_resource_context", "(", "\"style.json\"", ")", ")", ".", "read", "(", ")", ",", ")" ]
constructs the AA context
def AA ( n ) : if ( n <= 1 ) : return Context ( '10\n00' ) else : AA1 = AA ( n - 1 ) r1 = C1 ( 2 ** ( n - 1 ) , 2 ** ( n - 1 ) ) - AA1 r2 = AA1 - AA1 return r1 + r2
6,928
https://github.com/pyfca/pyfca/blob/cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0/pyfca/implications.py#L550-L557
[ "def", "watch_and_wait", "(", "self", ",", "poll_interval", "=", "10", ",", "idle_log_timeout", "=", "None", ",", "kill_on_timeout", "=", "False", ",", "stash_log_method", "=", "None", ",", "tag_instances", "=", "False", ",", "*", "*", "kwargs", ")", ":", "return", "wait_for_complete", "(", "self", ".", "_job_queue", ",", "job_list", "=", "self", ".", "job_list", ",", "job_name_prefix", "=", "self", ".", "basename", ",", "poll_interval", "=", "poll_interval", ",", "idle_log_timeout", "=", "idle_log_timeout", ",", "kill_on_log_timeout", "=", "kill_on_timeout", ",", "stash_log_method", "=", "stash_log_method", ",", "tag_instances", "=", "tag_instances", ",", "*", "*", "kwargs", ")" ]
constructs the BB context
def BB ( n ) : if ( n <= 1 ) : return Context ( '0\n1' ) else : BB1 = BB ( n - 1 ) AA1 = AA ( n - 1 ) r1 = C1 ( ( n - 1 ) * 2 ** ( n - 2 ) , 2 ** ( n - 1 ) ) - AA1 - BB1 r2 = BB1 - C1 ( 2 ** ( n - 1 ) , 2 ** ( n - 1 ) ) - BB1 return r1 + r2
6,929
https://github.com/pyfca/pyfca/blob/cf8cea9e76076dbf4bb3f38996dcb5491b0eb0b0/pyfca/implications.py#L558-L566
[ "def", "parse", "(", "s", ")", ":", "stopwatch", "=", "StopWatch", "(", ")", "for", "line", "in", "s", ".", "splitlines", "(", ")", ":", "if", "line", ".", "strip", "(", ")", ":", "parts", "=", "line", ".", "split", "(", "None", ")", "name", "=", "parts", "[", "0", "]", "if", "name", "!=", "\"%\"", ":", "# ie not the header line", "rest", "=", "(", "float", "(", "v", ")", "for", "v", "in", "parts", "[", "2", ":", "]", ")", "stopwatch", ".", "times", "[", "parts", "[", "0", "]", "]", ".", "merge", "(", "Stat", ".", "build", "(", "*", "rest", ")", ")", "return", "stopwatch" ]
The heart of the Instruction object . This method will make sure that all fields not entered will be defaulted to a correct value . Also checks for incongruities in the data entered if it was by the user .
def processAndSetDefaults ( self ) : # INPUT, OUTPUT, GIVEN + BUILDABLE DEPS if not self . input : raise ValueError ( NO_INPUT_FILE ) if not self . output : # Build directory must exist, right? if not self . build_directory : File ( ) pass # Can it be built? / reference self.output_format for this else : pass # if it is not congruent with other info provided if not self . build_directory : pass # Initialize it for dependency in self . given_dependencies : pass # Check if the dependcy exists if self . output_format != self . output . getType ( ) : raise ValueError ( "" ) # Given dependencies must actually exist! # output_name must be at a lower extenion level than input_name # The build directory return
6,930
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/instructions.py#L53-L82
[ "def", "start_indexing", "(", "self", ")", ":", "for", "filepath", "in", "self", ".", "filepaths", ":", "with", "open", "(", "filepath", ")", "as", "fp", ":", "blob", "=", "fp", ".", "read", "(", ")", "self", ".", "words", ".", "extend", "(", "self", ".", "tokenize", "(", "blob", ")", ")" ]
create a new greenlet from a function and arguments
def greenlet ( func , args = ( ) , kwargs = None ) : if args or kwargs : def target ( ) : return func ( * args , * * ( kwargs or { } ) ) else : target = func return compat . greenlet ( target , state . mainloop )
6,931
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L208-L228
[ "def", "getCiphertextLen", "(", "self", ",", "ciphertext", ")", ":", "plaintext_length", "=", "self", ".", "getPlaintextLen", "(", "ciphertext", ")", "ciphertext_length", "=", "plaintext_length", "+", "Encrypter", ".", "_CTXT_EXPANSION", "return", "ciphertext_length" ]
insert a greenlet into the scheduler
def schedule ( target = None , args = ( ) , kwargs = None ) : if target is None : def decorator ( target ) : return schedule ( target , args = args , kwargs = kwargs ) return decorator if isinstance ( target , compat . greenlet ) or target is compat . main_greenlet : glet = target else : glet = greenlet ( target , args , kwargs ) state . paused . append ( glet ) return target
6,932
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L263-L300
[ "def", "characterize_local_files", "(", "filedir", ",", "max_bytes", "=", "MAX_FILE_DEFAULT", ")", ":", "file_data", "=", "{", "}", "logging", ".", "info", "(", "'Characterizing files in {}'", ".", "format", "(", "filedir", ")", ")", "for", "filename", "in", "os", ".", "listdir", "(", "filedir", ")", ":", "filepath", "=", "os", ".", "path", ".", "join", "(", "filedir", ",", "filename", ")", "file_stats", "=", "os", ".", "stat", "(", "filepath", ")", "creation_date", "=", "arrow", ".", "get", "(", "file_stats", ".", "st_ctime", ")", ".", "isoformat", "(", ")", "file_size", "=", "file_stats", ".", "st_size", "if", "file_size", "<=", "max_bytes", ":", "file_md5", "=", "hashlib", ".", "md5", "(", ")", "with", "open", "(", "filepath", ",", "\"rb\"", ")", "as", "f", ":", "for", "chunk", "in", "iter", "(", "lambda", ":", "f", ".", "read", "(", "4096", ")", ",", "b\"\"", ")", ":", "file_md5", ".", "update", "(", "chunk", ")", "md5", "=", "file_md5", ".", "hexdigest", "(", ")", "file_data", "[", "filename", "]", "=", "{", "'tags'", ":", "guess_tags", "(", "filename", ")", ",", "'description'", ":", "''", ",", "'md5'", ":", "md5", ",", "'creation_date'", ":", "creation_date", ",", "}", "return", "file_data" ]
insert a greenlet into the scheduler to be run at a set time
def schedule_at ( unixtime , target = None , args = ( ) , kwargs = None ) : if target is None : def decorator ( target ) : return schedule_at ( unixtime , target , args = args , kwargs = kwargs ) return decorator if isinstance ( target , compat . greenlet ) or target is compat . main_greenlet : glet = target else : glet = greenlet ( target , args , kwargs ) state . timed_paused . insert ( unixtime , glet ) return target
6,933
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L303-L344
[ "def", "is_dicom", "(", "filename", ")", ":", "try", ":", "with", "open", "(", "filename", ")", "as", "f", ":", "d", "=", "f", ".", "read", "(", "132", ")", "return", "d", "[", "128", ":", "132", "]", "==", "\"DICM\"", "except", ":", "return", "False" ]
insert a greenlet into the scheduler to run after a set time
def schedule_in ( secs , target = None , args = ( ) , kwargs = None ) : return schedule_at ( time . time ( ) + secs , target , args , kwargs )
6,934
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L347-L378
[ "def", "DiffAnyArrays", "(", "self", ",", "oldObj", ",", "newObj", ",", "isElementLinks", ")", ":", "if", "len", "(", "oldObj", ")", "!=", "len", "(", "newObj", ")", ":", "__Log__", ".", "debug", "(", "'DiffAnyArrays: Array lengths do not match. %d != %d'", "%", "(", "len", "(", "oldObj", ")", ",", "len", "(", "newObj", ")", ")", ")", "return", "False", "for", "i", ",", "j", "in", "zip", "(", "oldObj", ",", "newObj", ")", ":", "if", "not", "self", ".", "DiffAnyObjects", "(", "i", ",", "j", ",", "isElementLinks", ")", ":", "__Log__", ".", "debug", "(", "'DiffAnyArrays: One of the elements do not match.'", ")", "return", "False", "return", "True" ]
insert a greenlet into the scheduler to run regularly at an interval
def schedule_recurring ( interval , target = None , maxtimes = 0 , starting_at = 0 , args = ( ) , kwargs = None ) : starting_at = starting_at or time . time ( ) if target is None : def decorator ( target ) : return schedule_recurring ( interval , target , maxtimes , starting_at , args , kwargs ) return decorator func = target if isinstance ( target , compat . greenlet ) or target is compat . main_greenlet : if target . dead : raise TypeError ( "can't schedule a dead greenlet" ) func = target . run def run_and_schedule_one ( tstamp , count ) : # pass in the time scheduled instead of just checking # time.time() so that delays don't add up if not maxtimes or count < maxtimes : tstamp += interval func ( * args , * * ( kwargs or { } ) ) schedule_at ( tstamp , run_and_schedule_one , args = ( tstamp , count + 1 ) ) firstrun = starting_at + interval schedule_at ( firstrun , run_and_schedule_one , args = ( firstrun , 0 ) ) return target
6,935
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L381-L442
[ "def", "decode", "(", "self", ",", "data", ")", ":", "file_name", "=", "self", ".", "_filename_decoder", ".", "decode", "(", "data", "[", "'filename'", "]", ")", "file_data", "=", "data", "[", "'contents'", "]", "i", "=", "0", "max_size", "=", "len", "(", "file_data", ")", "while", "file_data", "[", "i", ":", "i", "+", "1", "]", "!=", "'H'", "and", "i", "<", "max_size", ":", "i", "+=", "1", "if", "i", ">", "0", ":", "data", "[", "'contents'", "]", "=", "file_data", "[", "i", ":", "]", "transmission", "=", "self", ".", "_file_decoder", ".", "decode", "(", "data", "[", "'contents'", "]", ")", "[", "0", "]", "return", "CWRFile", "(", "file_name", ",", "transmission", ")" ]
schedule a greenlet to have an exception raised in it immediately
def schedule_exception ( exception , target ) : if not isinstance ( target , compat . greenlet ) : raise TypeError ( "can only schedule exceptions for greenlets" ) if target . dead : raise ValueError ( "can't send exceptions to a dead greenlet" ) schedule ( target ) state . to_raise [ target ] = exception
6,936
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L445-L458
[ "def", "scale_columns", "(", "A", ",", "v", ",", "copy", "=", "True", ")", ":", "v", "=", "np", ".", "ravel", "(", "v", ")", "M", ",", "N", "=", "A", ".", "shape", "if", "not", "isspmatrix", "(", "A", ")", ":", "raise", "ValueError", "(", "'scale columns needs a sparse matrix'", ")", "if", "N", "!=", "len", "(", "v", ")", ":", "raise", "ValueError", "(", "'scale vector has incompatible shape'", ")", "if", "copy", ":", "A", "=", "A", ".", "copy", "(", ")", "A", ".", "data", "=", "np", ".", "asarray", "(", "A", ".", "data", ",", "dtype", "=", "upcast", "(", "A", ".", "dtype", ",", "v", ".", "dtype", ")", ")", "else", ":", "v", "=", "np", ".", "asarray", "(", "v", ",", "dtype", "=", "A", ".", "dtype", ")", "if", "isspmatrix_csr", "(", "A", ")", ":", "csr_scale_columns", "(", "M", ",", "N", ",", "A", ".", "indptr", ",", "A", ".", "indices", ",", "A", ".", "data", ",", "v", ")", "elif", "isspmatrix_bsr", "(", "A", ")", ":", "R", ",", "C", "=", "A", ".", "blocksize", "bsr_scale_columns", "(", "int", "(", "M", "/", "R", ")", ",", "int", "(", "N", "/", "C", ")", ",", "R", ",", "C", ",", "A", ".", "indptr", ",", "A", ".", "indices", ",", "np", ".", "ravel", "(", "A", ".", "data", ")", ",", "v", ")", "elif", "isspmatrix_csc", "(", "A", ")", ":", "pyamg", ".", "amg_core", ".", "csc_scale_columns", "(", "M", ",", "N", ",", "A", ".", "indptr", ",", "A", ".", "indices", ",", "A", ".", "data", ",", "v", ")", "else", ":", "fmt", "=", "A", ".", "format", "A", "=", "scale_columns", "(", "csr_matrix", "(", "A", ")", ",", "v", ")", ".", "asformat", "(", "fmt", ")", "return", "A" ]
schedule a greenlet to have an exception raised at a unix timestamp
def schedule_exception_at ( unixtime , exception , target ) : if not isinstance ( target , compat . greenlet ) : raise TypeError ( "can only schedule exceptions for greenlets" ) if target . dead : raise ValueError ( "can't send exceptions to a dead greenlet" ) schedule_at ( unixtime , target ) state . to_raise [ target ] = exception
6,937
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L461-L476
[ "def", "identical", "(", "self", ",", "o", ")", ":", "if", "self", ".", "_reversed", "!=", "o", ".", "_reversed", ":", "return", "False", "for", "region", ",", "si", "in", "self", ".", "regions", ".", "items", "(", ")", ":", "if", "region", "in", "o", ".", "regions", ":", "o_si", "=", "o", ".", "regions", "[", "region", "]", "if", "not", "si", ".", "identical", "(", "o_si", ")", ":", "return", "False", "else", ":", "return", "False", "return", "True" ]
schedule a greenlet receive an exception after a number of seconds
def schedule_exception_in ( secs , exception , target ) : schedule_exception_at ( time . time ( ) + secs , exception , target )
6,938
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L479-L489
[ "def", "_tracked_model_diff", "(", "self", ")", ":", "initial_state", "=", "self", ".", "_tracked_model_initial_state", "current_state", "=", "serializer", ".", "dump_model", "(", "self", ")", "if", "current_state", "==", "initial_state", ":", "return", "None", "change_log", "=", "{", "}", "for", "field", "in", "initial_state", ":", "old_value", "=", "initial_state", "[", "field", "]", "[", "Field", ".", "VALUE", "]", "new_value", "=", "current_state", "[", "field", "]", "[", "Field", ".", "VALUE", "]", "if", "old_value", "==", "new_value", ":", "continue", "field_data", "=", "initial_state", ".", "copy", "(", ")", "[", "field", "]", "del", "field_data", "[", "Field", ".", "VALUE", "]", "field_data", "[", "Field", ".", "OLD", "]", "=", "old_value", "field_data", "[", "Field", ".", "NEW", "]", "=", "new_value", "change_log", "[", "field", "]", "=", "field_data", "return", "change_log", "or", "None" ]
schedule a greenlet to be stopped immediately
def end ( target ) : if not isinstance ( target , compat . greenlet ) : raise TypeError ( "argument must be a greenlet" ) if not target . dead : schedule ( target ) state . to_raise [ target ] = compat . GreenletExit ( )
6,939
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L492-L502
[ "def", "read_data", "(", "self", ",", "blocksize", "=", "4096", ")", ":", "frames", "=", "ctypes", ".", "c_uint", "(", "blocksize", "//", "self", ".", "_client_fmt", ".", "mBytesPerFrame", ")", "buf", "=", "ctypes", ".", "create_string_buffer", "(", "blocksize", ")", "buflist", "=", "AudioBufferList", "(", ")", "buflist", ".", "mNumberBuffers", "=", "1", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mNumberChannels", "=", "self", ".", "_client_fmt", ".", "mChannelsPerFrame", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mDataByteSize", "=", "blocksize", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mData", "=", "ctypes", ".", "cast", "(", "buf", ",", "ctypes", ".", "c_void_p", ")", "while", "True", ":", "check", "(", "_coreaudio", ".", "ExtAudioFileRead", "(", "self", ".", "_obj", ",", "ctypes", ".", "byref", "(", "frames", ")", ",", "ctypes", ".", "byref", "(", "buflist", ")", ")", ")", "assert", "buflist", ".", "mNumberBuffers", "==", "1", "size", "=", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mDataByteSize", "if", "not", "size", ":", "break", "data", "=", "ctypes", ".", "cast", "(", "buflist", ".", "mBuffers", "[", "0", "]", ".", "mData", ",", "ctypes", ".", "POINTER", "(", "ctypes", ".", "c_char", ")", ")", "blob", "=", "data", "[", ":", "size", "]", "yield", "blob" ]
run all the registered exception handlers
def handle_exception ( klass , exc , tb , coro = None ) : if coro is None : coro = compat . getcurrent ( ) replacement = [ ] for weak in state . local_exception_handlers . get ( coro , ( ) ) : func = weak ( ) if func is None : continue try : func ( klass , exc , tb ) except Exception : continue replacement . append ( weak ) if replacement : state . local_exception_handlers [ coro ] [ : ] = replacement replacement = [ ] for weak in state . global_exception_handlers : func = weak ( ) if func is None : continue try : func ( klass , exc , tb ) except Exception : continue replacement . append ( weak ) state . global_exception_handlers [ : ] = replacement
6,940
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L606-L659
[ "def", "updateItem", "(", "self", ",", "itemParameters", ",", "clearEmptyFields", "=", "False", ",", "data", "=", "None", ",", "metadata", "=", "None", ",", "text", "=", "None", ",", "serviceUrl", "=", "None", ",", "multipart", "=", "False", ")", ":", "thumbnail", "=", "None", "largeThumbnail", "=", "None", "files", "=", "{", "}", "params", "=", "{", "\"f\"", ":", "\"json\"", ",", "}", "if", "clearEmptyFields", ":", "params", "[", "\"clearEmptyFields\"", "]", "=", "clearEmptyFields", "if", "serviceUrl", "is", "not", "None", ":", "params", "[", "'url'", "]", "=", "serviceUrl", "if", "text", "is", "not", "None", ":", "params", "[", "'text'", "]", "=", "text", "if", "isinstance", "(", "itemParameters", ",", "ItemParameter", ")", "==", "False", ":", "raise", "AttributeError", "(", "\"itemParameters must be of type parameter.ItemParameter\"", ")", "keys_to_delete", "=", "[", "'id'", ",", "'owner'", ",", "'size'", ",", "'numComments'", ",", "'numRatings'", ",", "'avgRating'", ",", "'numViews'", ",", "'overwrite'", "]", "dictItem", "=", "itemParameters", ".", "value", "for", "key", "in", "keys_to_delete", ":", "if", "key", "in", "dictItem", ":", "del", "dictItem", "[", "key", "]", "for", "key", "in", "dictItem", ":", "if", "key", "==", "\"thumbnail\"", ":", "files", "[", "'thumbnail'", "]", "=", "dictItem", "[", "'thumbnail'", "]", "elif", "key", "==", "\"largeThumbnail\"", ":", "files", "[", "'largeThumbnail'", "]", "=", "dictItem", "[", "'largeThumbnail'", "]", "elif", "key", "==", "\"metadata\"", ":", "metadata", "=", "dictItem", "[", "'metadata'", "]", "if", "os", ".", "path", ".", "basename", "(", "metadata", ")", "!=", "'metadata.xml'", ":", "tempxmlfile", "=", "os", ".", "path", ".", "join", "(", "tempfile", ".", "gettempdir", "(", ")", ",", "\"metadata.xml\"", ")", "if", "os", ".", "path", ".", "isfile", "(", "tempxmlfile", ")", "==", "True", ":", "os", ".", "remove", "(", "tempxmlfile", ")", "import", "shutil", "shutil", ".", "copy", "(", "metadata", ",", 
"tempxmlfile", ")", "metadata", "=", "tempxmlfile", "files", "[", "'metadata'", "]", "=", "dictItem", "[", "'metadata'", "]", "else", ":", "params", "[", "key", "]", "=", "dictItem", "[", "key", "]", "if", "data", "is", "not", "None", ":", "files", "[", "'file'", "]", "=", "data", "if", "metadata", "and", "os", ".", "path", ".", "isfile", "(", "metadata", ")", ":", "files", "[", "'metadata'", "]", "=", "metadata", "url", "=", "\"%s/update\"", "%", "self", ".", "root", "if", "multipart", ":", "itemID", "=", "self", ".", "id", "params", "[", "'multipart'", "]", "=", "True", "params", "[", "'fileName'", "]", "=", "os", ".", "path", ".", "basename", "(", "data", ")", "res", "=", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "securityHandler", "=", "self", ".", "_securityHandler", ",", "proxy_url", "=", "self", ".", "_proxy_url", ",", "proxy_port", "=", "self", ".", "_proxy_port", ")", "itemPartJSON", "=", "self", ".", "addByPart", "(", "filePath", "=", "data", ")", "res", "=", "self", ".", "commit", "(", "wait", "=", "True", ",", "additionalParams", "=", "{", "'type'", ":", "self", ".", "type", "}", ")", "else", ":", "res", "=", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "files", "=", "files", ",", "securityHandler", "=", "self", ".", "_securityHandler", ",", "proxy_url", "=", "self", ".", "_proxy_url", ",", "proxy_port", "=", "self", ".", "_proxy_port", ",", "force_form_post", "=", "True", ")", "self", ".", "__init", "(", ")", "return", "self" ]
add a callback for when an exception goes uncaught in any greenlet
def global_exception_handler ( handler ) : if not hasattr ( handler , "__call__" ) : raise TypeError ( "exception handlers must be callable" ) log . info ( "setting a new global exception handler" ) state . global_exception_handlers . append ( weakref . ref ( handler ) ) return handler
6,941
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L662-L682
[ "def", "Add", "(", "self", ",", "other", ")", ":", "if", "len", "(", "self", ".", "data", ")", "!=", "len", "(", "other", ".", "data", ")", ":", "raise", "RuntimeError", "(", "\"Can only add series of identical lengths.\"", ")", "for", "i", "in", "range", "(", "len", "(", "self", ".", "data", ")", ")", ":", "if", "self", ".", "data", "[", "i", "]", "[", "1", "]", "!=", "other", ".", "data", "[", "i", "]", "[", "1", "]", ":", "raise", "RuntimeError", "(", "\"Timestamp mismatch.\"", ")", "if", "self", ".", "data", "[", "i", "]", "[", "0", "]", "is", "None", "and", "other", ".", "data", "[", "i", "]", "[", "0", "]", "is", "None", ":", "continue", "self", ".", "data", "[", "i", "]", "[", "0", "]", "=", "(", "self", ".", "data", "[", "i", "]", "[", "0", "]", "or", "0", ")", "+", "(", "other", ".", "data", "[", "i", "]", "[", "0", "]", "or", "0", ")" ]
remove a callback from the list of global exception handlers
def remove_global_exception_handler ( handler ) : for i , cb in enumerate ( state . global_exception_handlers ) : cb = cb ( ) if cb is not None and cb is handler : state . global_exception_handlers . pop ( i ) log . info ( "removing a global exception handler" ) return True return False
6,942
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L685-L701
[ "def", "search", "(", "self", ",", "query", ",", "search_term", ",", "search_field", ",", "catalog", ")", ":", "logger", ".", "info", "(", "\"Reference Widget Catalog: {}\"", ".", "format", "(", "catalog", ".", "id", ")", ")", "if", "not", "search_term", ":", "return", "catalog", "(", "query", ")", "index", "=", "self", ".", "get_index", "(", "search_field", ",", "catalog", ")", "if", "not", "index", ":", "logger", ".", "warn", "(", "\"*** Index not found: '{}'\"", ".", "format", "(", "search_field", ")", ")", "return", "[", "]", "meta", "=", "index", ".", "meta_type", "if", "meta", "==", "\"TextIndexNG3\"", ":", "query", "[", "index", ".", "id", "]", "=", "\"{}*\"", ".", "format", "(", "search_term", ")", "elif", "meta", "==", "\"ZCTextIndex\"", ":", "logger", ".", "warn", "(", "\"*** Field '{}' ({}). Better use TextIndexNG3\"", ".", "format", "(", "meta", ",", "search_field", ")", ")", "query", "[", "index", ".", "id", "]", "=", "\"{}*\"", ".", "format", "(", "search_term", ")", "elif", "meta", "in", "[", "\"FieldIndex\"", ",", "\"KeywordIndex\"", "]", ":", "logger", ".", "warn", "(", "\"*** Field '{}' ({}). Better use TextIndexNG3\"", ".", "format", "(", "meta", ",", "search_field", ")", ")", "query", "[", "index", ".", "id", "]", "=", "search_term", "else", ":", "logger", ".", "warn", "(", "\"*** Index '{}' ({}) not supported\"", ".", "format", "(", "search_field", ",", "meta", ")", ")", "return", "[", "]", "logger", ".", "info", "(", "\"Reference Widget Query: {}\"", ".", "format", "(", "repr", "(", "query", ")", ")", ")", "return", "catalog", "(", "query", ")" ]
add a callback for when an exception occurs in a particular greenlet
def local_exception_handler ( handler = None , coro = None ) : if handler is None : return lambda h : local_exception_handler ( h , coro ) if not hasattr ( handler , "__call__" ) : raise TypeError ( "exception handlers must be callable" ) if coro is None : coro = compat . getcurrent ( ) log . info ( "setting a new coroutine local exception handler" ) state . local_exception_handlers . setdefault ( coro , [ ] ) . append ( weakref . ref ( handler ) ) return handler
6,943
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L704-L733
[ "def", "Add", "(", "self", ",", "other", ")", ":", "if", "len", "(", "self", ".", "data", ")", "!=", "len", "(", "other", ".", "data", ")", ":", "raise", "RuntimeError", "(", "\"Can only add series of identical lengths.\"", ")", "for", "i", "in", "range", "(", "len", "(", "self", ".", "data", ")", ")", ":", "if", "self", ".", "data", "[", "i", "]", "[", "1", "]", "!=", "other", ".", "data", "[", "i", "]", "[", "1", "]", ":", "raise", "RuntimeError", "(", "\"Timestamp mismatch.\"", ")", "if", "self", ".", "data", "[", "i", "]", "[", "0", "]", "is", "None", "and", "other", ".", "data", "[", "i", "]", "[", "0", "]", "is", "None", ":", "continue", "self", ".", "data", "[", "i", "]", "[", "0", "]", "=", "(", "self", ".", "data", "[", "i", "]", "[", "0", "]", "or", "0", ")", "+", "(", "other", ".", "data", "[", "i", "]", "[", "0", "]", "or", "0", ")" ]
remove a callback from the list of exception handlers for a coroutine
def remove_local_exception_handler ( handler , coro = None ) : if coro is None : coro = compat . getcurrent ( ) for i , cb in enumerate ( state . local_exception_handlers . get ( coro , [ ] ) ) : cb = cb ( ) if cb is not None and cb is handler : state . local_exception_handlers [ coro ] . pop ( i ) log . info ( "removing a coroutine local exception handler" ) return True return False
6,944
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L736-L755
[ "def", "compare_documents", "(", "self", ",", "file_1", ",", "file_2", ",", "file_1_content_type", "=", "None", ",", "file_2_content_type", "=", "None", ",", "file_1_label", "=", "None", ",", "file_2_label", "=", "None", ",", "model", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "file_1", "is", "None", ":", "raise", "ValueError", "(", "'file_1 must be provided'", ")", "if", "file_2", "is", "None", ":", "raise", "ValueError", "(", "'file_2 must be provided'", ")", "headers", "=", "{", "}", "if", "'headers'", "in", "kwargs", ":", "headers", ".", "update", "(", "kwargs", ".", "get", "(", "'headers'", ")", ")", "sdk_headers", "=", "get_sdk_headers", "(", "'compare-comply'", ",", "'V1'", ",", "'compare_documents'", ")", "headers", ".", "update", "(", "sdk_headers", ")", "params", "=", "{", "'version'", ":", "self", ".", "version", ",", "'file_1_label'", ":", "file_1_label", ",", "'file_2_label'", ":", "file_2_label", ",", "'model'", ":", "model", "}", "form_data", "=", "{", "}", "form_data", "[", "'file_1'", "]", "=", "(", "None", ",", "file_1", ",", "file_1_content_type", "or", "'application/octet-stream'", ")", "form_data", "[", "'file_2'", "]", "=", "(", "None", ",", "file_2", ",", "file_2_content_type", "or", "'application/octet-stream'", ")", "url", "=", "'/v1/comparison'", "response", "=", "self", ".", "request", "(", "method", "=", "'POST'", ",", "url", "=", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "files", "=", "form_data", ",", "accept_json", "=", "True", ")", "return", "response" ]
add a callback to run in every switch between coroutines
def global_hook ( handler ) : if not hasattr ( handler , "__call__" ) : raise TypeError ( "trace hooks must be callable" ) log . info ( "setting a new global hook callback" ) state . global_hooks . append ( weakref . ref ( handler ) ) return handler
6,945
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L758-L776
[ "def", "write_to_file", "(", "data", ",", "path", ")", ":", "if", "path", ".", "endswith", "(", "'.xml'", ")", ":", "filename", "=", "path", "else", ":", "filename", "=", "path", "+", "'.xml'", "tag_data", "=", "ET", ".", "Element", "(", "'data'", ")", "xml_file", "=", "open", "(", "filename", ",", "\"w\"", ")", "i", "=", "0", "for", "line", "in", "data", ":", "i", "+=", "1", "tag_item", "=", "ET", ".", "SubElement", "(", "tag_data", ",", "'item'", ")", "tag_date", "=", "ET", ".", "SubElement", "(", "tag_item", ",", "'date'", ")", "tag_desc", "=", "ET", ".", "SubElement", "(", "tag_item", ",", "'desc'", ")", "tag_currency", "=", "ET", ".", "SubElement", "(", "tag_item", ",", "'currency'", ")", "tag_amount", "=", "ET", ".", "SubElement", "(", "tag_item", ",", "'amount'", ")", "tag_item", ".", "set", "(", "'id'", ",", "str", "(", "i", ")", ")", "tag_date", ".", "text", "=", "line", "[", "'date'", "]", ".", "strftime", "(", "'%d/%m/%Y'", ")", "tag_desc", ".", "text", "=", "line", "[", "'desc'", "]", "tag_currency", ".", "text", "=", "line", "[", "'currency'", "]", "tag_amount", ".", "text", "=", "str", "(", "line", "[", "'amount'", "]", ")", "xml_file", ".", "write", "(", "prettify", "(", "tag_data", ")", ")", "xml_file", ".", "close", "(", ")" ]
remove a callback from the list of global hooks
def remove_global_hook ( handler ) : for i , cb in enumerate ( state . global_hooks ) : cb = cb ( ) if cb is not None and cb is handler : state . global_hooks . pop ( i ) log . info ( "removing a global hook callback" ) return True return False
6,946
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L779-L795
[ "def", "unindex_layers_with_issues", "(", "self", ",", "use_cache", "=", "False", ")", ":", "from", "hypermap", ".", "aggregator", ".", "models", "import", "Issue", ",", "Layer", ",", "Service", "from", "django", ".", "contrib", ".", "contenttypes", ".", "models", "import", "ContentType", "layer_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Layer", ")", "service_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Service", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "layer_type", ".", "id", ")", ":", "unindex_layer", "(", "issue", ".", "content_object", ".", "id", ",", "use_cache", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "service_type", ".", "id", ")", ":", "for", "layer", "in", "issue", ".", "content_object", ".", "layer_set", ".", "all", "(", ")", ":", "unindex_layer", "(", "layer", ".", "id", ",", "use_cache", ")" ]
add a callback to run every time a greenlet is about to be switched to
def local_incoming_hook ( handler = None , coro = None ) : if handler is None : return lambda h : local_incoming_hook ( h , coro ) if not hasattr ( handler , "__call__" ) : raise TypeError ( "trace hooks must be callable" ) if coro is None : coro = compat . getcurrent ( ) log . info ( "setting a coroutine incoming local hook callback" ) state . local_to_hooks . setdefault ( coro , [ ] ) . append ( weakref . ref ( handler ) ) return handler
6,947
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L798-L830
[ "def", "UNION_DECL", "(", "self", ",", "cursor", ",", "num", "=", "None", ")", ":", "return", "self", ".", "_record_decl", "(", "cursor", ",", "typedesc", ".", "Union", ",", "num", ")" ]
remove a callback from the incoming hooks for a particular coro
def remove_local_incoming_hook ( handler , coro = None ) : if coro is None : coro = compat . getcurrent ( ) for i , cb in enumerate ( state . local_to_hooks . get ( coro , [ ] ) ) : cb = cb ( ) if cb is not None and cb is handler : log . info ( "removing a coroutine incoming local hook callback" ) state . local_to_hooks [ coro ] . pop ( i ) return True return False
6,948
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L833-L852
[ "def", "is_mdgel", "(", "self", ")", ":", "# TODO: this likely reads the second page from file", "try", ":", "ismdgel", "=", "self", ".", "pages", "[", "0", "]", ".", "is_mdgel", "or", "self", ".", "pages", "[", "1", "]", ".", "is_mdgel", "if", "ismdgel", ":", "self", ".", "is_uniform", "=", "False", "return", "ismdgel", "except", "IndexError", ":", "return", "False" ]
add a callback to run every time a greenlet is switched away from
def local_outgoing_hook ( handler = None , coro = None ) : if handler is None : return lambda h : local_outgoing_hook ( h , coro ) if not hasattr ( handler , "__call__" ) : raise TypeError ( "trace hooks must be callable" ) if coro is None : coro = compat . getcurrent ( ) log . info ( "setting a coroutine local outgoing hook callback" ) state . local_from_hooks . setdefault ( coro , [ ] ) . append ( weakref . ref ( handler ) ) return handler
6,949
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L855-L886
[ "def", "metadata", "(", "self", ")", ":", "auto", "=", "{", "u'records'", ":", "self", ".", "size", "}", "auto", ".", "update", "(", "self", ".", "meta", ")", "return", "auto" ]
remove a callback from the outgoing hooks for a particular coro
def remove_local_outgoing_hook ( handler , coro = None ) : if coro is None : coro = compat . getcurrent ( ) for i , cb in enumerate ( state . local_from_hooks . get ( coro , [ ] ) ) : cb = cb ( ) if cb is not None and cb is handler : log . info ( "removing a coroutine outgoing local hook callback" ) state . local_from_hooks [ coro ] . pop ( i ) return True return False
6,950
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L889-L908
[ "def", "_is_enlargement", "(", "locator", ",", "global_index", ")", ":", "if", "(", "is_list_like", "(", "locator", ")", "and", "not", "is_slice", "(", "locator", ")", "and", "len", "(", "locator", ")", ">", "0", "and", "not", "is_boolean_array", "(", "locator", ")", "and", "(", "isinstance", "(", "locator", ",", "type", "(", "global_index", "[", "0", "]", ")", ")", "and", "locator", "not", "in", "global_index", ")", ")", ":", "n_diff_elems", "=", "len", "(", "pandas", ".", "Index", "(", "locator", ")", ".", "difference", "(", "global_index", ")", ")", "is_enlargement_boolean", "=", "n_diff_elems", ">", "0", "return", "is_enlargement_boolean", "return", "False" ]
turn off EINTR - raising from emulated syscalls on interruption by signals
def set_ignore_interrupts ( flag = True ) : log . info ( "setting ignore_interrupts to %r" % flag ) state . ignore_interrupts = bool ( flag )
6,951
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L911-L924
[ "def", "create_dirs", "(", "self", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "_path", ")", ":", "os", ".", "makedirs", "(", "self", ".", "_path", ")", "for", "dir_name", "in", "[", "self", ".", "OBJ_DIR", ",", "self", ".", "TMP_OBJ_DIR", ",", "self", ".", "PKG_DIR", ",", "self", ".", "CACHE_DIR", "]", ":", "path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_path", ",", "dir_name", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "os", ".", "mkdir", "(", "path", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "_version_path", "(", ")", ")", ":", "self", ".", "_write_format_version", "(", ")" ]
replace the scheduler s poller throwing away any pre - existing state
def reset_poller ( poll = None ) : state . poller = poll or poller . best ( ) log . info ( "resetting fd poller, using %s" % type ( state . poller ) . __name__ )
6,952
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/scheduler.py#L927-L933
[ "def", "decode", "(", "self", ",", "data", ")", ":", "file_name", "=", "self", ".", "_filename_decoder", ".", "decode", "(", "data", "[", "'filename'", "]", ")", "file_data", "=", "data", "[", "'contents'", "]", "i", "=", "0", "max_size", "=", "len", "(", "file_data", ")", "while", "file_data", "[", "i", ":", "i", "+", "1", "]", "!=", "'H'", "and", "i", "<", "max_size", ":", "i", "+=", "1", "if", "i", ">", "0", ":", "data", "[", "'contents'", "]", "=", "file_data", "[", "i", ":", "]", "transmission", "=", "self", ".", "_file_decoder", ".", "decode", "(", "data", "[", "'contents'", "]", ")", "[", "0", "]", "return", "CWRFile", "(", "file_name", ",", "transmission", ")" ]
Finds a given cyther resource in the test subdirectory in cyther package
def find_resource ( r , * , pkg = 'cyther' ) : file_path = pkg_resources . resource_filename ( pkg , os . path . join ( 'test' , r ) ) if not os . path . isfile ( file_path ) : msg = "Resource '{}' does not exist" raise FileNotFoundError ( msg . format ( file_path ) ) return file_path
6,953
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L17-L26
[ "def", "add", "(", "self", ",", "string", ":", "(", "str", ",", "list", ")", ")", ":", "if", "len", "(", "self", ".", "_entries", ")", "==", "1", ":", "self", ".", "_entries", "[", "0", "]", ".", "delete", "(", "0", ",", "'end'", ")", "self", ".", "_entries", "[", "0", "]", ".", "insert", "(", "0", ",", "string", ")", "else", ":", "if", "len", "(", "string", ")", "!=", "len", "(", "self", ".", "_entries", ")", ":", "raise", "ValueError", "(", "'the \"string\" list must be '", "'equal to the number of entries'", ")", "for", "i", ",", "e", "in", "enumerate", "(", "self", ".", "_entries", ")", ":", "self", ".", "_entries", "[", "i", "]", ".", "delete", "(", "0", ",", "'end'", ")", "self", ".", "_entries", "[", "i", "]", ".", "insert", "(", "0", ",", "string", "[", "i", "]", ")" ]
Check that two outputs have the same contents as one another even if they aren t sorted yet
def assert_output ( output , assert_equal ) : sorted_output = sorted ( output ) sorted_assert = sorted ( assert_equal ) if sorted_output != sorted_assert : raise ValueError ( ASSERT_ERROR . format ( sorted_output , sorted_assert ) )
6,954
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L78-L86
[ "def", "unregister_vm", "(", "vm_ref", ")", ":", "vm_name", "=", "get_managed_object_name", "(", "vm_ref", ")", "log", ".", "trace", "(", "'Destroying vm \\'%s\\''", ",", "vm_name", ")", "try", ":", "vm_ref", ".", "UnregisterVM", "(", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")" ]
Write a dictionary of string keys to a file
def write_dict_to_file ( file_path , obj ) : lines = [ ] for key , value in obj . items ( ) : lines . append ( key + ':' + repr ( value ) + '\n' ) with open ( file_path , 'w+' ) as file : file . writelines ( lines ) return None
6,955
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L89-L100
[ "def", "_at", "(", "self", ",", "t", ")", ":", "rITRF", ",", "vITRF", ",", "error", "=", "self", ".", "ITRF_position_velocity_error", "(", "t", ")", "rGCRS", ",", "vGCRS", "=", "ITRF_to_GCRS2", "(", "t", ",", "rITRF", ",", "vITRF", ")", "return", "rGCRS", ",", "vGCRS", ",", "rGCRS", ",", "error" ]
Read a dictionary of strings from a file
def read_dict_from_file ( file_path ) : with open ( file_path ) as file : lines = file . read ( ) . splitlines ( ) obj = { } for line in lines : key , value = line . split ( ':' , maxsplit = 1 ) obj [ key ] = eval ( value ) return obj
6,956
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L103-L115
[ "def", "aggregate", "(", "self", ",", "val1", ",", "val2", ")", ":", "assert", "val1", "is", "not", "None", "assert", "val2", "is", "not", "None", "return", "self", ".", "_aggregator", "(", "val1", ",", "val2", ")" ]
Ask the user to input something on the terminal level check their response and ask again if they didn t answer correctly
def get_input ( prompt , check , * , redo_prompt = None , repeat_prompt = False ) : if isinstance ( check , str ) : check = ( check , ) to_join = [ ] for item in check : if item : to_join . append ( str ( item ) ) else : to_join . append ( "''" ) prompt += " [{}]: " . format ( '/' . join ( to_join ) ) if repeat_prompt : redo_prompt = prompt elif not redo_prompt : redo_prompt = "Incorrect input, please choose from {}: " "" . format ( str ( check ) ) if callable ( check ) : def _checker ( r ) : return check ( r ) elif isinstance ( check , tuple ) : def _checker ( r ) : return r in check else : raise ValueError ( RESPONSES_ERROR . format ( type ( check ) ) ) response = input ( prompt ) while not _checker ( response ) : print ( response , type ( response ) ) response = input ( redo_prompt if redo_prompt else prompt ) return response
6,957
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L121-L155
[ "def", "create", "(", "self", ",", "relpath", ",", "langs", ",", "category", ")", ":", "return", "SourceRoot", "(", "relpath", ",", "tuple", "(", "self", ".", "_canonicalize_langs", "(", "langs", ")", ")", ",", "category", ")" ]
Asks for a single choice out of multiple items . Given those items and a prompt to ask the user with
def get_choice ( prompt , choices ) : print ( ) checker = [ ] for offset , choice in enumerate ( choices ) : number = offset + 1 print ( "\t{}): '{}'\n" . format ( number , choice ) ) checker . append ( str ( number ) ) response = get_input ( prompt , tuple ( checker ) + ( '' , ) ) if not response : print ( "Exiting..." ) exit ( ) offset = int ( response ) - 1 selected = choices [ offset ] return selected
6,958
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L158-L178
[ "def", "_normalize_interval", "(", "start", ",", "end", ",", "value", ")", ":", "if", "not", "isinstance", "(", "start", ",", "datetime", ")", ":", "start", "=", "datetime", ".", "combine", "(", "start", ",", "START_OF_DAY", ")", "end", "=", "datetime", ".", "combine", "(", "end", ",", "START_OF_DAY", ")", "if", "start", ".", "tzinfo", "is", "None", ":", "start", "=", "pytz", ".", "UTC", ".", "localize", "(", "start", ")", "end", "=", "pytz", ".", "UTC", ".", "localize", "(", "end", ")", "else", ":", "start", "=", "start", ".", "astimezone", "(", "pytz", ".", "UTC", ")", "end", "=", "end", ".", "astimezone", "(", "pytz", ".", "UTC", ")", "return", "start", ",", "end" ]
A function to generate a batch of commands to run in a specific order as to meet all the dependencies for each command . For example the commands with no dependencies are run first and the commands with the most deep dependencies are run last
def generateBatches ( tasks , givens ) : _removeGivensFromTasks ( tasks , givens ) batches = [ ] while tasks : batch = set ( ) for task , dependencies in tasks . items ( ) : if not dependencies : batch . add ( task ) if not batch : _batchErrorProcessing ( tasks ) for task in batch : del tasks [ task ] for task , dependencies in tasks . items ( ) : for item in batch : if item in dependencies : tasks [ task ] . remove ( item ) batches . append ( batch ) return batches
6,959
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/tools.py#L191-L219
[ "def", "with_headers", "(", "self", ",", "headers", ")", ":", "return", "self", ".", "replace", "(", "headers", "=", "_merge_maps", "(", "self", ".", "headers", ",", "headers", ")", ")" ]
Get group by providing name ID description or other unique key .
def Get ( self , key ) : for group in self . groups : if group . id . lower ( ) == key . lower ( ) : return ( group ) elif group . name . lower ( ) == key . lower ( ) : return ( group ) elif group . description . lower ( ) == key . lower ( ) : return ( group ) raise ( clc . CLCException ( "Group not found" ) )
6,960
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L65-L81
[ "def", "_set_final_freeness", "(", "self", ",", "flag", ")", ":", "if", "flag", ":", "self", ".", "state", ".", "memory", ".", "store", "(", "self", ".", "heap_base", "+", "self", ".", "heap_size", "-", "self", ".", "_chunk_size_t_size", ",", "~", "CHUNK_P_MASK", ")", "else", ":", "self", ".", "state", ".", "memory", ".", "store", "(", "self", ".", "heap_base", "+", "self", ".", "heap_size", "-", "self", ".", "_chunk_size_t_size", ",", "CHUNK_P_MASK", ")" ]
Search group list by providing partial name ID description or other key .
def Search ( self , key ) : results = [ ] for group in self . groups : if group . id . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( group ) elif group . name . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( group ) elif group . description . lower ( ) . find ( key . lower ( ) ) != - 1 : results . append ( group ) return ( results )
6,961
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L84-L98
[ "def", "tar_and_copy_usr_dir", "(", "usr_dir", ",", "train_dir", ")", ":", "tf", ".", "logging", ".", "info", "(", "\"Tarring and pushing t2t_usr_dir.\"", ")", "usr_dir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "expanduser", "(", "usr_dir", ")", ")", "# Copy usr dir to a temp location", "top_dir", "=", "os", ".", "path", ".", "join", "(", "tempfile", ".", "gettempdir", "(", ")", ",", "\"t2t_usr_container\"", ")", "tmp_usr_dir", "=", "os", ".", "path", ".", "join", "(", "top_dir", ",", "usr_dir_lib", ".", "INTERNAL_USR_DIR_PACKAGE", ")", "shutil", ".", "rmtree", "(", "top_dir", ",", "ignore_errors", "=", "True", ")", "shutil", ".", "copytree", "(", "usr_dir", ",", "tmp_usr_dir", ")", "# Insert setup.py if one does not exist", "top_setup_fname", "=", "os", ".", "path", ".", "join", "(", "top_dir", ",", "\"setup.py\"", ")", "setup_file_str", "=", "get_setup_file", "(", "name", "=", "\"DummyUsrDirPackage\"", ",", "packages", "=", "get_requirements", "(", "usr_dir", ")", ")", "with", "tf", ".", "gfile", ".", "Open", "(", "top_setup_fname", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "write", "(", "setup_file_str", ")", "usr_tar", "=", "_tar_and_copy", "(", "top_dir", ",", "train_dir", ")", "return", "usr_tar" ]
Gets a list of groups within a given account .
def GetAll ( root_group_id , alias = None , session = None ) : if not alias : alias = clc . v2 . Account . GetAlias ( session = session ) groups = [ ] for r in clc . v2 . API . Call ( 'GET' , 'groups/%s/%s' % ( alias , root_group_id ) , { } , session = session ) [ 'groups' ] : groups . append ( Group ( id = r [ 'id' ] , alias = alias , group_obj = r , session = session ) ) return ( groups )
6,962
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L105-L118
[ "def", "_dump_header", "(", "self", ")", ":", "with", "open", "(", "self", ".", "_file", ",", "'w'", ")", "as", "_file", ":", "_file", ".", "write", "(", "self", ".", "_hsrt", ")", "self", ".", "_sptr", "=", "_file", ".", "tell", "(", ")", "_file", ".", "write", "(", "self", ".", "_hend", ")" ]
Reloads the group object to synchronize with cloud representation .
def Refresh ( self ) : self . dirty = False self . data = clc . v2 . API . Call ( 'GET' , 'groups/%s/%s' % ( self . alias , self . id ) , session = self . session ) self . data [ 'changeInfo' ] [ 'createdDate' ] = clc . v2 . time_utils . ZuluTSToSeconds ( self . data [ 'changeInfo' ] [ 'createdDate' ] ) self . data [ 'changeInfo' ] [ 'modifiedDate' ] = clc . v2 . time_utils . ZuluTSToSeconds ( self . data [ 'changeInfo' ] [ 'modifiedDate' ] )
6,963
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L152-L163
[ "def", "calcHairpin", "(", "seq", ",", "mv_conc", "=", "50.0", ",", "dv_conc", "=", "0.0", ",", "dntp_conc", "=", "0.8", ",", "dna_conc", "=", "50.0", ",", "temp_c", "=", "37", ",", "max_loop", "=", "30", ")", ":", "_setThermoArgs", "(", "*", "*", "locals", "(", ")", ")", "return", "_THERMO_ANALYSIS", ".", "calcHairpin", "(", "seq", ")", ".", "checkExc", "(", ")" ]
Returns default configurations for resources deployed to this group .
def Defaults ( self , key ) : if not hasattr ( self , 'defaults' ) : self . defaults = clc . v2 . API . Call ( 'GET' , 'groups/%s/%s/defaults' % ( self . alias , self . id ) , session = self . session ) try : return ( self . defaults [ key ] [ 'value' ] ) except : return ( None )
6,964
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L166-L181
[ "def", "StoreCSRFCookie", "(", "user", ",", "response", ")", ":", "csrf_token", "=", "GenerateCSRFToken", "(", "user", ",", "None", ")", "response", ".", "set_cookie", "(", "\"csrftoken\"", ",", "csrf_token", ",", "max_age", "=", "CSRF_TOKEN_DURATION", ".", "seconds", ")" ]
Returns a Groups object containing all child groups .
def Subgroups ( self ) : return ( Groups ( alias = self . alias , groups_lst = self . data [ 'groups' ] , session = self . session ) )
6,965
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L184-L192
[ "def", "issueViaEmail", "(", "self", ",", "issuer", ",", "email", ",", "product", ",", "templateData", ",", "domainName", ",", "httpPort", "=", "80", ")", ":", "ticket", "=", "self", ".", "createTicket", "(", "issuer", ",", "unicode", "(", "email", ",", "'ascii'", ")", ",", "product", ")", "nonce", "=", "ticket", ".", "nonce", "signupInfo", "=", "{", "'from'", ":", "'signup@'", "+", "domainName", ",", "'to'", ":", "email", ",", "'date'", ":", "rfc822", ".", "formatdate", "(", ")", ",", "'message-id'", ":", "smtp", ".", "messageid", "(", ")", ",", "'link'", ":", "self", ".", "ticketLink", "(", "domainName", ",", "httpPort", ",", "nonce", ")", "}", "msg", "=", "templateData", "%", "signupInfo", "return", "ticket", ",", "_sendEmail", "(", "signupInfo", "[", "'from'", "]", ",", "email", ",", "msg", ")" ]
Returns a Servers object containing all servers within the group .
def Servers ( self ) : return ( clc . v2 . Servers ( alias = self . alias , servers_lst = [ obj [ 'id' ] for obj in self . data [ 'links' ] if obj [ 'rel' ] == 'server' ] , session = self . session ) )
6,966
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/group.py#L224-L235
[ "def", "process_train_set", "(", "hdf5_file", ",", "train_archive", ",", "patch_archive", ",", "n_train", ",", "wnid_map", ",", "shuffle_seed", "=", "None", ")", ":", "producer", "=", "partial", "(", "train_set_producer", ",", "train_archive", "=", "train_archive", ",", "patch_archive", "=", "patch_archive", ",", "wnid_map", "=", "wnid_map", ")", "consumer", "=", "partial", "(", "image_consumer", ",", "hdf5_file", "=", "hdf5_file", ",", "num_expected", "=", "n_train", ",", "shuffle_seed", "=", "shuffle_seed", ")", "producer_consumer", "(", "producer", ",", "consumer", ")" ]
List of Queued requests and their current status details .
def List ( type = 'All' ) : r = clc . v1 . API . Call ( 'post' , 'Queue/ListQueueRequests' , { 'ItemStatusType' : Queue . item_status_type_map [ type ] } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'Requests' ] )
6,967
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/queue.py#L18-L26
[ "def", "ignore_samples", "(", "self", ",", "data", ")", ":", "try", ":", "if", "isinstance", "(", "data", ",", "OrderedDict", ")", ":", "newdata", "=", "OrderedDict", "(", ")", "elif", "isinstance", "(", "data", ",", "dict", ")", ":", "newdata", "=", "dict", "(", ")", "else", ":", "return", "data", "for", "k", ",", "v", "in", "data", ".", "items", "(", ")", ":", "# Match ignore glob patterns", "glob_match", "=", "any", "(", "fnmatch", ".", "fnmatch", "(", "k", ",", "sn", ")", "for", "sn", "in", "config", ".", "sample_names_ignore", ")", "re_match", "=", "any", "(", "re", ".", "match", "(", "sn", ",", "k", ")", "for", "sn", "in", "config", ".", "sample_names_ignore_re", ")", "if", "not", "glob_match", "and", "not", "re_match", ":", "newdata", "[", "k", "]", "=", "v", "return", "newdata", "except", "(", "TypeError", ",", "AttributeError", ")", ":", "return", "data" ]
Load all networks associated with the given location .
def _Load ( self , location ) : # https://api.ctl.io/v2-experimental/networks/ALIAS/WA1 for network in clc . v2 . API . Call ( 'GET' , '/v2-experimental/networks/%s/%s' % ( self . alias , location ) , { } , session = self . session ) : self . networks . append ( Network ( id = network [ 'id' ] , alias = self . alias , network_obj = network , session = self . session ) )
6,968
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L51-L59
[ "def", "arrows", "(", "self", ")", ":", "if", "self", ".", "active_vectors", "is", "None", ":", "return", "arrow", "=", "vtk", ".", "vtkArrowSource", "(", ")", "arrow", ".", "Update", "(", ")", "alg", "=", "vtk", ".", "vtkGlyph3D", "(", ")", "alg", ".", "SetSourceData", "(", "arrow", ".", "GetOutput", "(", ")", ")", "alg", ".", "SetOrient", "(", "True", ")", "alg", ".", "SetInputData", "(", "self", ")", "alg", ".", "SetVectorModeToUseVector", "(", ")", "alg", ".", "SetScaleModeToScaleByVector", "(", ")", "alg", ".", "Update", "(", ")", "return", "vtki", ".", "wrap", "(", "alg", ".", "GetOutput", "(", ")", ")" ]
Get network by providing name ID or other unique key .
def Get ( self , key ) : for network in self . networks : try : if network . id == key : return ( network ) if network . name == key : return ( network ) if network . cidr == key : return ( network ) except : # We ignore malformed records with missing attributes pass
6,969
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L62-L76
[ "def", "apply", "(", "self", ")", ":", "self", ".", "read_group_info", "(", ")", "if", "self", ".", "tabs", ".", "count", "(", ")", "==", "0", ":", "# disactivate buttons", "self", ".", "button_color", ".", "setEnabled", "(", "False", ")", "self", ".", "button_del", ".", "setEnabled", "(", "False", ")", "self", ".", "button_apply", ".", "setEnabled", "(", "False", ")", "else", ":", "# activate buttons", "self", ".", "button_color", ".", "setEnabled", "(", "True", ")", "self", ".", "button_del", ".", "setEnabled", "(", "True", ")", "self", ".", "button_apply", ".", "setEnabled", "(", "True", ")", "if", "self", ".", "groups", ":", "self", ".", "parent", ".", "overview", ".", "update_position", "(", ")", "self", ".", "parent", ".", "spectrum", ".", "update", "(", ")", "self", ".", "parent", ".", "notes", ".", "enable_events", "(", ")", "else", ":", "self", ".", "parent", ".", "traces", ".", "reset", "(", ")", "self", ".", "parent", ".", "spectrum", ".", "reset", "(", ")", "self", ".", "parent", ".", "notes", ".", "enable_events", "(", ")" ]
Claims a new network within a given account .
def Create ( alias = None , location = None , session = None ) : if not alias : alias = clc . v2 . Account . GetAlias ( session = session ) if not location : location = clc . v2 . Account . GetLocation ( session = session ) return clc . v2 . Requests ( clc . v2 . API . Call ( 'POST' , '/v2-experimental/networks/%s/%s/claim' % ( alias , location ) , session = session ) , alias = alias , session = session )
6,970
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L102-L116
[ "def", "init", "(", "uri", ",", "echo", "=", "False", ")", ":", "global", "ENGINE", ",", "_METADATA", ",", "JOBS_TABLE", ",", "METADATA_TABLE", ",", "LOGS_TABLE", "ENGINE", "=", "sqlalchemy", ".", "create_engine", "(", "uri", ",", "echo", "=", "echo", ",", "convert_unicode", "=", "True", ")", "_METADATA", "=", "sqlalchemy", ".", "MetaData", "(", "ENGINE", ")", "JOBS_TABLE", "=", "_init_jobs_table", "(", ")", "METADATA_TABLE", "=", "_init_metadata_table", "(", ")", "LOGS_TABLE", "=", "_init_logs_table", "(", ")", "_METADATA", ".", "create_all", "(", "ENGINE", ")" ]
Releases the calling network .
def Delete ( self , location = None ) : if not location : location = clc . v2 . Account . GetLocation ( session = self . session ) return clc . v2 . API . Call ( 'POST' , '/v2-experimental/networks/%s/%s/%s/release' % ( self . alias , location , self . id ) , session = self . session )
6,971
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L118-L129
[ "def", "remove_mentions", "(", "self", ",", "string", ")", ":", "def", "replace", "(", "obj", ",", "*", ",", "transforms", "=", "self", ".", "MENTION_TRANSFORMS", ")", ":", "return", "transforms", ".", "get", "(", "obj", ".", "group", "(", "0", ")", ",", "'@invalid'", ")", "return", "self", ".", "MENTION_PATTERN", ".", "sub", "(", "replace", ",", "string", ")" ]
Updates the attributes of a given Network via PUT .
def Update ( self , name , description = None , location = None ) : if not location : location = clc . v2 . Account . GetLocation ( session = self . session ) payload = { 'name' : name } payload [ 'description' ] = description if description else self . description r = clc . v2 . API . Call ( 'PUT' , '/v2-experimental/networks/%s/%s/%s' % ( self . alias , location , self . id ) , payload , session = self . session ) self . name = self . data [ 'name' ] = name if description : self . data [ 'description' ] = description
6,972
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L131-L152
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", 
"False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
Reloads the network object to synchronize with cloud representation .
def Refresh ( self , location = None ) : if not location : location = clc . v2 . Account . GetLocation ( session = self . session ) new_object = clc . v2 . API . Call ( 'GET' , '/v2-experimental/networks/%s/%s/%s' % ( self . alias , location , self . id ) , session = self . session ) if new_object : self . name = new_object [ 'name' ] self . data = new_object
6,973
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv2/network.py#L154-L167
[ "def", "delete_topic_groups", "(", "self", ",", "group_id", ",", "topic_id", ")", ":", "path", "=", "{", "}", "data", "=", "{", "}", "params", "=", "{", "}", "# REQUIRED - PATH - group_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"group_id\"", "]", "=", "group_id", "# REQUIRED - PATH - topic_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"topic_id\"", "]", "=", "topic_id", "self", ".", "logger", ".", "debug", "(", "\"DELETE /api/v1/groups/{group_id}/discussion_topics/{topic_id} with query params: {params} and form data: {data}\"", ".", "format", "(", "params", "=", "params", ",", "data", "=", "data", ",", "*", "*", "path", ")", ")", "return", "self", ".", "generic_request", "(", "\"DELETE\"", ",", "\"/api/v1/groups/{group_id}/discussion_topics/{topic_id}\"", ".", "format", "(", "*", "*", "path", ")", ",", "data", "=", "data", ",", "params", "=", "params", ",", "no_data", "=", "True", ")" ]
Iterate through build_requires list and pip install if package is not present accounting for version
def install_build_requires ( pkg_targets ) : def pip_install ( pkg_name , pkg_vers = None ) : pkg_name_version = '%s==%s' % ( pkg_name , pkg_vers ) if pkg_vers else pkg_name print '[WARNING] %s not found, attempting to install using a raw "pip install" call!' % pkg_name_version subprocess . Popen ( 'pip install %s' % pkg_name_version , shell = True ) . communicate ( ) def get_pkg_info ( pkg ) : """Get package name and version given a build_requires element""" pkg_name , pkg_vers = None , None if '==' in pkg : pkg_name , pkg_vers = pkg . split ( '==' ) else : pkg_name = pkg . replace ( '>' , '' ) . replace ( '<' , '' ) . split ( '=' ) [ 0 ] return pkg_name , pkg_vers for pkg in pkg_targets : pkg_name , pkg_vers = get_pkg_info ( pkg ) try : pkg_name_version = '%s==%s' % ( pkg_name , pkg_vers ) if pkg_vers else pkg_name if pkg_vers : version = getattr ( importlib . import_module ( pkg_name ) , '__version__' ) if version != pkg_vers : pip_install ( pkg_name , pkg_vers ) else : importlib . import_module ( pkg_name ) except ImportError : pip_install ( pkg_name , pkg_vers )
6,974
https://github.com/biocommons/uta-align/blob/c3af0a293bb6ce7d241a8ec9843e4a6bb18d751f/setup.py#L32-L61
[ "def", "info", "(", "self", ")", ":", "print", "(", "\"Sorry, but I don't have much to share.\"", ")", "print", "(", "\"This is me:\"", ")", "print", "(", "self", ")", "print", "(", "\"And these are the experiments assigned to me:\"", ")", "print", "(", "self", ".", "experiments", ")" ]
Starts the entire compilation procedure
def initiateCompilation ( args , file ) : ####commands = finalizeCommands(args, file) commands = makeCommands ( 0 , file ) if not args [ 'concise' ] and args [ 'print_args' ] : print_commands = bool ( args [ 'watch' ] ) response = multiCall ( * commands , print_commands = print_commands ) return response
6,975
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/processing.py#L24-L33
[ "def", "split", "(", "self", ",", "k", ")", ":", "if", "not", "1", "<=", "k", "<=", "self", ".", "num_rows", "-", "1", ":", "raise", "ValueError", "(", "\"Invalid value of k. k must be between 1 and the\"", "\"number of rows - 1\"", ")", "rows", "=", "np", ".", "random", ".", "permutation", "(", "self", ".", "num_rows", ")", "first", "=", "self", ".", "take", "(", "rows", "[", ":", "k", "]", ")", "rest", "=", "self", ".", "take", "(", "rows", "[", "k", ":", "]", ")", "for", "column_label", "in", "self", ".", "_formats", ":", "first", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "rest", ".", "_formats", "[", "column_label", "]", "=", "self", ".", "_formats", "[", "column_label", "]", "return", "first", ",", "rest" ]
Extracts and runs the
def run ( path , timer = False , repeat = 3 , number = 10000 , precision = 2 ) : code = extractAtCyther ( path ) if not code : output = "There was no '@cyther' code collected from the " "file '{}'\n" . format ( path ) # TODO This should use a result, right? return { 'returncode' : 0 , 'output' : output } module_directory = os . path . dirname ( path ) module_name = os . path . splitext ( os . path . basename ( path ) ) [ 0 ] setup_string = SETUP_TEMPLATE . format ( module_directory , module_name , '{}' ) if timer : string = TIMER_TEMPLATE . format ( setup_string , code , repeat , number , precision , '{}' ) else : string = setup_string + code script = os . path . join ( os . path . dirname ( __file__ ) , 'script.py' ) with open ( script , 'w+' ) as file : file . write ( string ) response = call ( [ 'python' , script ] ) return response
6,976
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/processing.py#L140-L166
[ "def", "_infer_type", "(", "obj", ")", ":", "if", "obj", "is", "None", ":", "return", "NullType", "(", ")", "if", "hasattr", "(", "obj", ",", "'__UDT__'", ")", ":", "return", "obj", ".", "__UDT__", "dataType", "=", "_type_mappings", ".", "get", "(", "type", "(", "obj", ")", ")", "if", "dataType", "is", "DecimalType", ":", "# the precision and scale of `obj` may be different from row to row.", "return", "DecimalType", "(", "38", ",", "18", ")", "elif", "dataType", "is", "not", "None", ":", "return", "dataType", "(", ")", "if", "isinstance", "(", "obj", ",", "dict", ")", ":", "for", "key", ",", "value", "in", "obj", ".", "items", "(", ")", ":", "if", "key", "is", "not", "None", "and", "value", "is", "not", "None", ":", "return", "MapType", "(", "_infer_type", "(", "key", ")", ",", "_infer_type", "(", "value", ")", ",", "True", ")", "return", "MapType", "(", "NullType", "(", ")", ",", "NullType", "(", ")", ",", "True", ")", "elif", "isinstance", "(", "obj", ",", "list", ")", ":", "for", "v", "in", "obj", ":", "if", "v", "is", "not", "None", ":", "return", "ArrayType", "(", "_infer_type", "(", "obj", "[", "0", "]", ")", ",", "True", ")", "return", "ArrayType", "(", "NullType", "(", ")", ",", "True", ")", "elif", "isinstance", "(", "obj", ",", "array", ")", ":", "if", "obj", ".", "typecode", "in", "_array_type_mappings", ":", "return", "ArrayType", "(", "_array_type_mappings", "[", "obj", ".", "typecode", "]", "(", ")", ",", "False", ")", "else", ":", "raise", "TypeError", "(", "\"not supported type: array(%s)\"", "%", "obj", ".", "typecode", ")", "else", ":", "try", ":", "return", "_infer_schema", "(", "obj", ")", "except", "TypeError", ":", "raise", "TypeError", "(", "\"not supported type: %s\"", "%", "type", "(", "obj", ")", ")" ]
The heart of Cyther this function controls the main loop and can be used to perform any Cyther action . You can call if using Cyther from the module level
def core ( args ) : args = furtherArgsProcessing ( args ) numfiles = len ( args [ 'filenames' ] ) interval = INTERVAL / numfiles files = processFiles ( args ) while True : for file in files : cytherize ( args , file ) if not args [ 'watch' ] : break else : time . sleep ( interval )
6,977
https://github.com/nickpandolfi/Cyther/blob/9fb0bd77af594008aa6ee8af460aa8c953abf5bc/cyther/processing.py#L169-L186
[ "def", "create_cvmfs_storage_class", "(", "cvmfs_volume", ")", ":", "from", "kubernetes", ".", "client", ".", "rest", "import", "ApiException", "from", "reana_commons", ".", "k8s", ".", "api_client", "import", "current_k8s_storagev1_api_client", "try", ":", "current_k8s_storagev1_api_client", ".", "create_storage_class", "(", "render_cvmfs_sc", "(", "cvmfs_volume", ")", ")", "except", "ApiException", "as", "e", ":", "if", "e", ".", "status", "!=", "409", ":", "raise", "e" ]
Set the timestamp of the linguistic processor set to None for the current time
def set_timestamp ( self , timestamp = None ) : if timestamp is None : import time timestamp = time . strftime ( '%Y-%m-%dT%H:%M:%S%Z' ) self . node . set ( 'timestamp' , timestamp )
6,978
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L332-L341
[ "def", "mangle_volume", "(", "citation_elements", ")", ":", "volume_re", "=", "re", ".", "compile", "(", "ur\"(\\d+)([A-Z])\"", ",", "re", ".", "U", "|", "re", ".", "I", ")", "for", "el", "in", "citation_elements", ":", "if", "el", "[", "'type'", "]", "==", "'JOURNAL'", ":", "matches", "=", "volume_re", ".", "match", "(", "el", "[", "'volume'", "]", ")", "if", "matches", ":", "el", "[", "'volume'", "]", "=", "matches", ".", "group", "(", "2", ")", "+", "matches", ".", "group", "(", "1", ")", "return", "citation_elements" ]
Set the begin timestamp of the linguistic processor set to None for the current time
def set_beginTimestamp ( self , btimestamp = None ) : if btimestamp is None : import time btimestamp = time . strftime ( '%Y-%m-%dT%H:%M:%S%Z' ) self . node . set ( 'beginTimestamp' , btimestamp )
6,979
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L352-L361
[ "def", "transition_matrix_non_reversible", "(", "C", ")", ":", "if", "not", "scipy", ".", "sparse", ".", "issparse", "(", "C", ")", ":", "C", "=", "scipy", ".", "sparse", ".", "csr_matrix", "(", "C", ")", "rowsum", "=", "C", ".", "tocsr", "(", ")", ".", "sum", "(", "axis", "=", "1", ")", "# catch div by zero", "if", "np", ".", "min", "(", "rowsum", ")", "==", "0.0", ":", "raise", "ValueError", "(", "\"matrix C contains rows with sum zero.\"", ")", "rowsum", "=", "np", ".", "array", "(", "1.", "/", "rowsum", ")", ".", "flatten", "(", ")", "norm", "=", "scipy", ".", "sparse", ".", "diags", "(", "rowsum", ",", "0", ")", "return", "norm", "*", "C" ]
Set the end timestamp of the linguistic processor set to None for the current time
def set_endTimestamp ( self , etimestamp = None ) : if etimestamp is None : import time etimestamp = time . strftime ( '%Y-%m-%dT%H:%M:%S%Z' ) self . node . set ( 'endTimestamp' , etimestamp )
6,980
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L372-L381
[ "def", "addNoiseToVector", "(", "inputVector", ",", "noiseLevel", ",", "vectorType", ")", ":", "if", "vectorType", "==", "'sparse'", ":", "corruptSparseVector", "(", "inputVector", ",", "noiseLevel", ")", "elif", "vectorType", "==", "'dense'", ":", "corruptDenseVector", "(", "inputVector", ",", "noiseLevel", ")", "else", ":", "raise", "ValueError", "(", "\"vectorType must be 'sparse' or 'dense' \"", ")" ]
Sets the publicId to the public object
def set_publicId ( self , publicId ) : publicObj = self . get_public ( ) if publicObj is not None : publicObj . set_publicid ( publicId ) else : publicObj = Cpublic ( ) publicObj . set_publicid ( publicId ) self . set_public ( publicObj )
6,981
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L559-L571
[ "def", "override_env_variables", "(", ")", ":", "env_vars", "=", "(", "\"LOGNAME\"", ",", "\"USER\"", ",", "\"LNAME\"", ",", "\"USERNAME\"", ")", "old", "=", "[", "os", ".", "environ", "[", "v", "]", "if", "v", "in", "os", ".", "environ", "else", "None", "for", "v", "in", "env_vars", "]", "for", "v", "in", "env_vars", ":", "os", ".", "environ", "[", "v", "]", "=", "\"test\"", "yield", "for", "i", ",", "v", "in", "enumerate", "(", "env_vars", ")", ":", "if", "old", "[", "i", "]", ":", "os", ".", "environ", "[", "v", "]", "=", "old", "[", "i", "]" ]
Sets the uri to the public object
def set_uri ( self , uri ) : publicObj = self . get_public ( ) if publicObj is not None : publicObj . set_uri ( uri ) else : publicObj = Cpublic ( ) publicObj . set_uri ( uri ) self . set_public ( publicObj )
6,982
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L585-L597
[ "def", "read", "(", "self", ")", ":", "line", "=", "self", ".", "trace_file", ".", "readline", "(", ")", "if", "line", "==", "''", ":", "if", "self", ".", "loop", ":", "self", ".", "_reopen_file", "(", ")", "else", ":", "self", ".", "trace_file", ".", "close", "(", ")", "self", ".", "trace_file", "=", "None", "raise", "DataSourceError", "(", ")", "message", "=", "JsonFormatter", ".", "deserialize", "(", "line", ")", "timestamp", "=", "message", ".", "get", "(", "'timestamp'", ",", "None", ")", "if", "self", ".", "realtime", "and", "timestamp", "is", "not", "None", ":", "self", ".", "_store_timestamp", "(", "timestamp", ")", "self", ".", "_wait", "(", "self", ".", "starting_time", ",", "self", ".", "first_timestamp", ",", "timestamp", ")", "return", "line", "+", "\"\\x00\"" ]
Removes the linguistic processors for a given layer
def remove_lp ( self , layer ) : for this_node in self . node . findall ( 'linguisticProcessors' ) : if this_node . get ( 'layer' ) == layer : self . node . remove ( this_node ) break
6,983
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L620-L629
[ "def", "prepare_file", "(", "filename", ")", ":", "directory", "=", "os", ".", "path", ".", "join", "(", "utils", ".", "get_project_root", "(", ")", ",", "\"analyzation/\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "directory", ")", ":", "os", ".", "makedirs", "(", "directory", ")", "workfilename", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "filename", ")", "open", "(", "workfilename", ",", "'w'", ")", ".", "close", "(", ")", "# Truncate the file", "return", "workfilename" ]
Adds a linguistic processor to a certain layer
def add_linguistic_processor ( self , layer , my_lp ) : ## Locate the linguisticProcessor element for taht layer found_lp_obj = None for this_lp in self . node . findall ( 'linguisticProcessors' ) : lp_obj = ClinguisticProcessors ( this_lp ) if lp_obj . get_layer ( ) == layer : found_lp_obj = lp_obj break if found_lp_obj is None : #Not found found_lp_obj = ClinguisticProcessors ( ) found_lp_obj . set_layer ( layer ) self . add_linguistic_processors ( found_lp_obj ) found_lp_obj . add_linguistic_processor ( my_lp )
6,984
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L631-L652
[ "def", "close", "(", "self", ")", "->", "None", ":", "if", "self", ".", "mmap", "is", "not", "None", ":", "self", ".", "mmap", ".", "close", "(", ")", "try", ":", "os", ".", "close", "(", "self", ".", "fd", ")", "except", "OSError", ":", "pass" ]
Returns the fileDesc object or None if there is no such element
def get_fileDesc ( self ) : node = self . node . find ( 'fileDesc' ) if node is not None : return CfileDesc ( node = node ) else : return None
6,985
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L654-L664
[ "def", "sync_druid_source", "(", "self", ")", ":", "payload", "=", "request", ".", "get_json", "(", "force", "=", "True", ")", "druid_config", "=", "payload", "[", "'config'", "]", "user_name", "=", "payload", "[", "'user'", "]", "cluster_name", "=", "payload", "[", "'cluster'", "]", "user", "=", "security_manager", ".", "find_user", "(", "username", "=", "user_name", ")", "DruidDatasource", "=", "ConnectorRegistry", ".", "sources", "[", "'druid'", "]", "DruidCluster", "=", "DruidDatasource", ".", "cluster_class", "if", "not", "user", ":", "err_msg", "=", "__", "(", "\"Can't find User '%(name)s', please ask your admin \"", "'to create one.'", ",", "name", "=", "user_name", ")", "logging", ".", "error", "(", "err_msg", ")", "return", "json_error_response", "(", "err_msg", ")", "cluster", "=", "db", ".", "session", ".", "query", "(", "DruidCluster", ")", ".", "filter_by", "(", "cluster_name", "=", "cluster_name", ")", ".", "first", "(", ")", "if", "not", "cluster", ":", "err_msg", "=", "__", "(", "\"Can't find DruidCluster with cluster_name = \"", "\"'%(name)s'\"", ",", "name", "=", "cluster_name", ")", "logging", ".", "error", "(", "err_msg", ")", "return", "json_error_response", "(", "err_msg", ")", "try", ":", "DruidDatasource", ".", "sync_to_db_from_config", "(", "druid_config", ",", "user", ",", "cluster", ")", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "utils", ".", "error_msg_from_exception", "(", "e", ")", ")", "return", "json_error_response", "(", "utils", ".", "error_msg_from_exception", "(", "e", ")", ")", "return", "Response", "(", "status", "=", "201", ")" ]
Returns the public object or None if there is no such element
def get_public ( self ) : node = self . node . find ( 'public' ) if node is not None : return Cpublic ( node = node ) else : return None
6,986
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/header_data.py#L674-L684
[ "def", "merge", "(", "self", ",", "workdir", ",", "gswfk_file", ",", "dfpt_files", ",", "gkk_files", ",", "out_gkk", ",", "binascii", "=", "0", ")", ":", "raise", "NotImplementedError", "(", "\"This method should be tested\"", ")", "#out_gkk = out_gkk if cwd is None else os.path.join(os.path.abspath(cwd), out_gkk)", "# We work with absolute paths.", "gswfk_file", "=", "os", ".", "path", ".", "absath", "(", "gswfk_file", ")", "dfpt_files", "=", "[", "os", ".", "path", ".", "abspath", "(", "s", ")", "for", "s", "in", "list_strings", "(", "dfpt_files", ")", "]", "gkk_files", "=", "[", "os", ".", "path", ".", "abspath", "(", "s", ")", "for", "s", "in", "list_strings", "(", "gkk_files", ")", "]", "print", "(", "\"Will merge %d 1WF files, %d GKK file in output %s\"", "%", "(", "len", "(", "dfpt_files", ")", ",", "len", "(", "gkk_files", ")", ",", "out_gkk", ")", ")", "if", "self", ".", "verbose", ":", "for", "i", ",", "f", "in", "enumerate", "(", "dfpt_files", ")", ":", "print", "(", "\" [%d] 1WF %s\"", "%", "(", "i", ",", "f", ")", ")", "for", "i", ",", "f", "in", "enumerate", "(", "gkk_files", ")", ":", "print", "(", "\" [%d] GKK %s\"", "%", "(", "i", ",", "f", ")", ")", "self", ".", "stdin_fname", ",", "self", ".", "stdout_fname", ",", "self", ".", "stderr_fname", "=", "map", "(", "os", ".", "path", ".", "join", ",", "3", "*", "[", "workdir", "]", ",", "[", "\"mrggkk.stdin\"", ",", "\"mrggkk.stdout\"", ",", "\"mrggkk.stderr\"", "]", ")", "inp", "=", "StringIO", "(", ")", "inp", ".", "write", "(", "out_gkk", "+", "\"\\n\"", ")", "# Name of the output file", "inp", ".", "write", "(", "str", "(", "binascii", ")", "+", "\"\\n\"", ")", "# Integer flag: 0 --> binary output, 1 --> ascii formatted output", "inp", ".", "write", "(", "gswfk_file", "+", "\"\\n\"", ")", "# Name of the groud state wavefunction file WF", "#dims = len(dfpt_files, gkk_files, ?)", "dims", "=", "\" \"", ".", "join", "(", "[", "str", "(", "d", ")", "for", "d", "in", "dims", "]", ")", "inp", ".", 
"write", "(", "dims", "+", "\"\\n\"", ")", "# Number of 1WF, of GKK files, and number of 1WF files in all the GKK files", "# Names of the 1WF files...", "for", "fname", "in", "dfpt_files", ":", "inp", ".", "write", "(", "fname", "+", "\"\\n\"", ")", "# Names of the GKK files...", "for", "fname", "in", "gkk_files", ":", "inp", ".", "write", "(", "fname", "+", "\"\\n\"", ")", "self", ".", "stdin_data", "=", "[", "s", "for", "s", "in", "inp", ".", "getvalue", "(", ")", "]", "with", "open", "(", "self", ".", "stdin_fname", ",", "\"w\"", ")", "as", "fh", ":", "fh", ".", "writelines", "(", "self", ".", "stdin_data", ")", "# Force OS to write data to disk.", "fh", ".", "flush", "(", ")", "os", ".", "fsync", "(", "fh", ".", "fileno", "(", ")", ")", "self", ".", "execute", "(", "workdir", ")", "return", "out_gkk" ]
Gets a list of Blueprint Packages filtered by classification and visibility .
def GetPackages ( classification , visibility ) : r = clc . v1 . API . Call ( 'post' , 'Blueprint/GetPackages' , { 'Classification' : Blueprint . classification_stoi [ classification ] , 'Visibility' : Blueprint . visibility_stoi [ visibility ] } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r [ 'Packages' ] )
6,987
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L51-L61
[ "def", "addObserver", "(", "self", ",", "observer", ")", ":", "Observable", ".", "addObserver", "(", "self", ",", "observer", ")", "# If self.startOnDemand is True, the reader monitoring", "# thread only runs when there are observers.", "if", "self", ".", "startOnDemand", ":", "if", "0", "<", "self", ".", "countObservers", "(", ")", ":", "if", "not", "self", ".", "rmthread", ":", "self", ".", "rmthread", "=", "ReaderMonitoringThread", "(", "self", ",", "self", ".", "readerProc", ",", "self", ".", "period", ")", "# start reader monitoring thread in another thread to", "# avoid a deadlock; addObserver and notifyObservers called", "# in the ReaderMonitoringThread run() method are", "# synchronized", "try", ":", "# Python 3.x", "import", "_thread", "_thread", ".", "start_new_thread", "(", "self", ".", "rmthread", ".", "start", ",", "(", ")", ")", "except", ":", "# Python 2.x", "import", "thread", "thread", ".", "start_new_thread", "(", "self", ".", "rmthread", ".", "start", ",", "(", ")", ")", "else", ":", "observer", ".", "update", "(", "self", ",", "(", "self", ".", "rmthread", ".", "readers", ",", "[", "]", ")", ")" ]
Gets a list of all Blueprint Packages with a given classification .
def GetAllPackages ( classification ) : packages = [ ] for visibility in Blueprint . visibility_stoi . keys ( ) : try : for r in Blueprint . GetPackages ( classification , visibility ) : packages . append ( dict ( r . items ( ) + { 'Visibility' : visibility } . items ( ) ) ) except : pass if len ( packages ) : return ( packages )
6,988
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L65-L78
[ "def", "get_rsa_key", "(", "self", ",", "username", ")", ":", "try", ":", "resp", "=", "self", ".", "session", ".", "post", "(", "'https://steamcommunity.com/login/getrsakey/'", ",", "timeout", "=", "15", ",", "data", "=", "{", "'username'", ":", "username", ",", "'donotchache'", ":", "int", "(", "time", "(", ")", "*", "1000", ")", ",", "}", ",", ")", ".", "json", "(", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "e", ":", "raise", "HTTPError", "(", "str", "(", "e", ")", ")", "return", "resp" ]
Uploads specified zip package to cloud endpoint .
def PackageUpload ( package , ftp_url ) : #o = urlparse.urlparse(ftp_url) # Very weak URL checking #if o.scheme.lower() != "ftp": # clc.v1.output.Status('ERROR',2,'Invalid FTP URL') # return # Confirm file exists if not os . path . isfile ( package ) : clc . v1 . output . Status ( 'ERROR' , 2 , 'Package file (%s) not found' % ( package ) ) return m = re . search ( "ftp://(?P<user>.+?):(?P<passwd>.+?)@(?P<host>.+)" , ftp_url ) try : ftp = ftplib . FTP ( m . group ( 'host' ) , m . group ( 'user' ) , m . group ( 'passwd' ) ) file = open ( package , 'rb' ) filename = re . sub ( ".*/" , "" , package ) ftp . storbinary ( "STOR %s" % ( filename ) , file ) file . close ( ) ftp . quit ( ) clc . v1 . output . Status ( 'SUCCESS' , 2 , 'Blueprint package %s Uploaded' % ( filename ) ) except Exception as e : clc . v1 . output . Status ( 'ERROR' , 2 , 'FTP error %s: %s' % ( ftp_url , str ( e ) ) ) return ( { } )
6,989
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L107-L139
[ "def", "get_trust_id", "(", "self", ")", ":", "# Implemented from template for osid.resource.Resource.get_avatar_id_template", "if", "not", "bool", "(", "self", ".", "_my_map", "[", "'trustId'", "]", ")", ":", "raise", "errors", ".", "IllegalState", "(", "'this Authorization has no trust'", ")", "else", ":", "return", "Id", "(", "self", ".", "_my_map", "[", "'trustId'", "]", ")" ]
Publishes a Blueprint Package for use within the Blueprint Designer .
def PackagePublish ( package , classification , visibility , os ) : r = clc . v1 . API . Call ( 'post' , 'Blueprint/PublishPackage' , { 'Classification' : Blueprint . classification_stoi [ classification ] , 'Name' : package , 'OperatingSystems' : os , 'Visibility' : Blueprint . visibility_stoi [ visibility ] } ) if int ( r [ 'StatusCode' ] ) == 0 : return ( r )
6,990
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L143-L156
[ "def", "guest_unpause", "(", "self", ",", "userid", ")", ":", "action", "=", "\"unpause guest '%s'\"", "%", "userid", "with", "zvmutils", ".", "log_and_reraise_sdkbase_error", "(", "action", ")", ":", "self", ".", "_vmops", ".", "guest_unpause", "(", "userid", ")" ]
Publishes a Blueprint Package for use within the Blueprint Designer after interactive OS selection .
def PackagePublishUI ( package , type , visibility ) : # fetch OS list linux_lst = { 'L' : { 'selected' : False , 'Description' : 'All Linux' } } windows_lst = { 'W' : { 'selected' : False , 'Description' : 'All Windows' } } for r in clc . v1 . Server . GetTemplates ( ) : r [ 'selected' ] = False if re . search ( "Windows" , r [ 'Description' ] ) : windows_lst [ str ( r [ 'OperatingSystem' ] ) ] = r elif re . search ( "CentOS|RedHat|Ubuntu" , r [ 'Description' ] ) : linux_lst [ str ( r [ 'OperatingSystem' ] ) ] = r # Get selections if os . name == 'posix' : scr = curses . initscr ( ) curses . cbreak ( ) while True : if os . name == 'posix' : c = Blueprint . _DrawPublishPackageUIPosix ( scr , linux_lst , windows_lst ) else : c = Blueprint . _DrawPublishPackageUI ( linux_lst , windows_lst ) if c . lower ( ) == 'q' : break elif c . lower ( ) == 'l' : for l in linux_lst : linux_lst [ l ] [ 'selected' ] = not linux_lst [ l ] [ 'selected' ] elif c . lower ( ) == 'w' : for l in windows_lst : windows_lst [ l ] [ 'selected' ] = not windows_lst [ l ] [ 'selected' ] elif c in linux_lst : linux_lst [ c ] [ 'selected' ] = not linux_lst [ c ] [ 'selected' ] elif c in windows_lst : windows_lst [ c ] [ 'selected' ] = not windows_lst [ c ] [ 'selected' ] if os . name == 'posix' : curses . nocbreak ( ) curses . echo ( ) curses . endwin ( ) # Extract selections ids = [ ] for l in dict ( linux_lst . items ( ) + windows_lst . items ( ) ) . values ( ) : if l [ 'selected' ] and 'OperatingSystem' in l : ids . append ( str ( l [ 'OperatingSystem' ] ) ) clc . v1 . output . Status ( 'SUCCESS' , 2 , 'Selected operating system IDs: %s' % ( " " . join ( ids ) ) ) return ( Blueprint . PackagePublish ( package , type , visibility , ids ) )
6,991
https://github.com/CenturyLinkCloud/clc-python-sdk/blob/f4dba40c627cb08dd4b7d0d277e8d67578010b05/src/clc/APIv1/blueprint.py#L214-L255
[ "def", "ensure_cache_id", "(", "self", ",", "cache_id_obj", ")", ":", "cache_id", "=", "self", ".", "_get_canonical_id", "(", "cache_id_obj", ")", "if", "cache_id", "!=", "self", ".", "_cache_id_obj", ":", "raise", "ValueError", "(", "\"cache mismatch {0} != {1}\"", ".", "format", "(", "cache_id", ",", "self", ".", "_cache_id_obj", ")", ")" ]
Return the members of the archive as a list of RPMInfo objects . The list has the same order as the members in the archive .
def getmembers ( self ) : if self . _members is None : self . _members = _members = [ ] g = self . data_file magic = g . read ( 2 ) while magic : if magic == b'07' : magic += g . read ( 4 ) member = RPMInfo . _read ( magic , g ) if member . name == 'TRAILER!!!' : break if not member . isdir : _members . append ( member ) magic = g . read ( 2 ) return _members return self . _members
6,992
https://github.com/srossross/rpmfile/blob/3ab96f211da7b56f5e99d8cc248f714a6e542d31/rpmfile/__init__.py#L111-L133
[ "def", "_connect", "(", "self", ")", ":", "try", ":", "# Open Connection", "self", ".", "influx", "=", "InfluxDBClient", "(", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "username", ",", "self", ".", "password", ",", "self", ".", "database", ",", "self", ".", "ssl", ")", "# Log", "self", ".", "log", ".", "debug", "(", "\"InfluxdbHandler: Established connection to \"", "\"%s:%d/%s.\"", ",", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "database", ")", "except", "Exception", "as", "ex", ":", "# Log Error", "self", ".", "_throttle_error", "(", "\"InfluxdbHandler: Failed to connect to \"", "\"%s:%d/%s. %s\"", ",", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "database", ",", "ex", ")", "# Close Socket", "self", ".", "_close", "(", ")", "return" ]
Return an RPMInfo object for member name . If name can not be found in the archive KeyError is raised . If a member occurs more than once in the archive its last occurrence is assumed to be the most up - to - date version .
def getmember ( self , name ) : members = self . getmembers ( ) for m in members [ : : - 1 ] : if m . name == name : return m raise KeyError ( "member %s could not be found" % name )
6,993
https://github.com/srossross/rpmfile/blob/3ab96f211da7b56f5e99d8cc248f714a6e542d31/rpmfile/__init__.py#L135-L147
[ "def", "bind_to_storage_buffer", "(", "self", ",", "binding", "=", "0", ",", "*", ",", "offset", "=", "0", ",", "size", "=", "-", "1", ")", "->", "None", ":", "self", ".", "mglo", ".", "bind_to_storage_buffer", "(", "binding", ",", "offset", ",", "size", ")" ]
Return the uncompressed raw CPIO data of the RPM archive .
def data_file ( self ) : if self . _data_file is None : fileobj = _SubFile ( self . _fileobj , self . data_offset ) if self . headers [ "archive_compression" ] == b"xz" : if not getattr ( sys . modules [ __name__ ] , 'lzma' , False ) : raise NoLZMAModuleError ( 'lzma module not present' ) self . _data_file = lzma . LZMAFile ( fileobj ) else : self . _data_file = gzip . GzipFile ( fileobj = fileobj ) return self . _data_file
6,994
https://github.com/srossross/rpmfile/blob/3ab96f211da7b56f5e99d8cc248f714a6e542d31/rpmfile/__init__.py#L163-L176
[ "def", "_save_token_on_disk", "(", "self", ")", ":", "token", "=", "self", ".", "_token", ".", "copy", "(", ")", "# Client secret is needed for token refreshing and isn't returned", "# as a pared of OAuth token by default", "token", ".", "update", "(", "client_secret", "=", "self", ".", "_client_secret", ")", "with", "codecs", ".", "open", "(", "config", ".", "TOKEN_FILE_PATH", ",", "'w'", ",", "'utf8'", ")", "as", "f", ":", "json", ".", "dump", "(", "token", ",", "f", ",", "ensure_ascii", "=", "False", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ",", ")" ]
Load and parse input if needed .
def load_input ( definition ) : if isinstance ( definition , ( str , io . TextIOWrapper ) ) : try : definition = yaml . safe_load ( definition ) except Exception as exc : raise ParsingInputError ( "Unable to parse input: %s" % str ( exc ) ) return definition
6,995
https://github.com/fridex/json2sql/blob/a0851dd79827a684319b03fb899e129f81ff2d3a/json2sql/utils.py#L19-L32
[ "def", "handle_not_found", "(", "exception", ",", "*", "*", "extra", ")", ":", "assert", "isinstance", "(", "exception", ",", "NotFound", ")", "page", "=", "Page", ".", "query", ".", "filter", "(", "db", ".", "or_", "(", "Page", ".", "url", "==", "request", ".", "path", ",", "Page", ".", "url", "==", "request", ".", "path", "+", "\"/\"", ")", ")", ".", "first", "(", ")", "if", "page", ":", "_add_url_rule", "(", "page", ".", "url", ")", "return", "render_template", "(", "[", "page", ".", "template_name", ",", "current_app", ".", "config", "[", "'PAGES_DEFAULT_TEMPLATE'", "]", "]", ",", "page", "=", "page", ")", "elif", "'wrapped'", "in", "extra", ":", "return", "extra", "[", "'wrapped'", "]", "(", "exception", ")", "else", ":", "return", "exception" ]
Handle general to SQL conversion .
def any2sql(func, definition_dict=None, **definition_kwargs):
    """Build an SQL statement by delegating the definition to *func*.

    The definition may be supplied either as a dict/string/file in
    *definition_dict* or directly as keyword arguments — never both.

    :param func: mosql statement builder to invoke
    :param definition_dict: clause definition, exclusive with kwargs
    :param definition_kwargs: clause definition given as keyword arguments
    :return: the SQL statement produced by ``func``
    :raises InputError: when both input styles are mixed
    :raises ClauseError: when the definition does not fit the clause
    """
    if definition_dict and definition_kwargs:
        raise InputError("Cannot process dict and kwargs input at the same time")

    definition = load_input(definition_dict or definition_kwargs)

    # A bare '*' in RETURNING must reach mosql unescaped.
    if definition.get('returning', '') == '*':
        definition['returning'] = mosql_raw('*')

    try:
        return func(**definition)
    except (TypeError, AttributeError) as exc:
        raise ClauseError("Clause definition error: %s" % str(exc)) from exc
    except Exception as exc:
        import json2sql.errors as json2sql_errors
        # Let this library's own error types propagate untouched; wrap
        # anything foreign so callers only ever see json2sql errors.
        if exc.__class__.__name__ in json2sql_errors.__dict__.keys():
            raise
        raise json2sql_errors.Json2SqlInternalError(
            "Unhandled error: %s" % str(exc)) from exc
6,996
https://github.com/fridex/json2sql/blob/a0851dd79827a684319b03fb899e129f81ff2d3a/json2sql/utils.py#L35-L62
[ "def", "find_stream", "(", "cls", ",", "fileobj", ",", "max_bytes", ")", ":", "r", "=", "BitReader", "(", "fileobj", ")", "stream", "=", "cls", "(", "r", ")", "if", "stream", ".", "sync", "(", "max_bytes", ")", ":", "stream", ".", "offset", "=", "(", "r", ".", "get_position", "(", ")", "-", "12", ")", "//", "8", "return", "stream" ]
Expand join definition to join call .
def _expand_join(join_definition):
    """Translate a join definition dict into a mosql join clause.

    The ``table`` key names the joined table and the optional ``join_type``
    key picks the mosql join helper (plain ``join`` when absent); every
    remaining key is forwarded to that helper.  Note: the dict is consumed
    in place (keys are popped), matching the original contract.

    :param join_definition: dict describing the join
    :return: the rendered join clause
    """
    table = join_definition.pop('table')
    kind = join_definition.pop('join_type', 'join')
    return getattr(mosql_query, kind)(table, **join_definition)
6,997
https://github.com/fridex/json2sql/blob/a0851dd79827a684319b03fb899e129f81ff2d3a/json2sql/select.py#L32-L40
[ "def", "get_directory_properties", "(", "self", ",", "share_name", ",", "directory_name", ",", "timeout", "=", "None", ",", "snapshot", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "_validate_not_none", "(", "'directory_name'", ",", "directory_name", ")", "request", "=", "HTTPRequest", "(", ")", "request", ".", "method", "=", "'GET'", "request", ".", "host_locations", "=", "self", ".", "_get_host_locations", "(", ")", "request", ".", "path", "=", "_get_path", "(", "share_name", ",", "directory_name", ")", "request", ".", "query", "=", "{", "'restype'", ":", "'directory'", ",", "'timeout'", ":", "_int_to_str", "(", "timeout", ")", ",", "'sharesnapshot'", ":", "_to_str", "(", "snapshot", ")", "}", "return", "self", ".", "_perform_request", "(", "request", ",", "_parse_directory", ",", "[", "directory_name", "]", ")" ]
Return a SELECT statement that will be used as a filter.
def _construct_select_query(**filter_definition):
    """Render a SELECT statement from a filter definition.

    Consumes the control keys ``table``, ``distinct`` and ``count`` from
    *filter_definition*, expands ``joins`` entries through ``_expand_join``,
    and rewrites ``where`` values that are sub-queries or ``$``-prefixed
    column references; everything left is forwarded to mosql's ``select``.

    :param filter_definition: keyword form of the filter definition
    :return: the rendered SELECT statement as a string
    :raises UnsupportedDefinitionError: for DISTINCT+COUNT or COUNT(columns)
    :raises ParsingInputError: for extra keys next to a nested sub-query
    """
    table_name = filter_definition.pop('table')
    distinct = filter_definition.pop('distinct', False)
    select_count = filter_definition.pop('count', False)
    # mosql cannot express SELECT COUNT(DISTINCT ...) or COUNT(columns),
    # so both combinations are rejected up front.
    if distinct and select_count:
        raise UnsupportedDefinitionError('SELECT (DISTINCT ...) is not supported')
    if select_count and 'select' in filter_definition:
        raise UnsupportedDefinitionError('SELECT COUNT(columns) is not supported')
    if 'joins' in filter_definition:
        join_definitions = filter_definition.pop('joins')
        # Accept a single join definition as well as a list/tuple of them.
        if not isinstance(join_definitions, (tuple, list)):
            join_definitions = (join_definitions,)
        filter_definition['joins'] = []
        for join_def in join_definitions:
            filter_definition['joins'].append(_expand_join(join_def))
    if 'where' in filter_definition:
        for key, value in filter_definition['where'].items():
            if is_filter_query(value):
                # We can do it recursively here
                sub_query = value.pop(DEFAULT_FILTER_KEY)
                if value:
                    raise ParsingInputError("Unknown keys for sub-query provided: %s" % value)
                filter_definition['where'][key] = mosql_raw('( {} )'.format(_construct_select_query(**sub_query)))
            elif isinstance(value, str) and value.startswith('$') and QUERY_REFERENCE.fullmatch(value[1:]):
                # Make sure we construct correct query with escaped table name and escaped column for sub-queries
                filter_definition['where'][key] = mosql_raw('"{}"'.format('"."'.join(value[1:].split('.'))))
    raw_select = select(table_name, **filter_definition)
    # DISTINCT/COUNT are patched into mosql's output textually; count=1
    # limits the replacement to this statement's own SELECT keyword.
    if distinct:
        # Note that we want to limit replace to the current SELECT, not affect nested ones
        raw_select = raw_select.replace('SELECT', 'SELECT DISTINCT', 1)
    if select_count:
        # Note that we want to limit replace to the current SELECT, not affect nested ones
        raw_select = raw_select.replace('SELECT *', 'SELECT COUNT(*)', 1)
    return raw_select
6,998
https://github.com/fridex/json2sql/blob/a0851dd79827a684319b03fb899e129f81ff2d3a/json2sql/select.py#L43-L90
[ "def", "load_external_data_for_tensor", "(", "tensor", ",", "base_dir", ")", ":", "# type: (TensorProto, Text) -> None", "if", "tensor", ".", "HasField", "(", "\"raw_data\"", ")", ":", "# already loaded", "return", "info", "=", "ExternalDataInfo", "(", "tensor", ")", "file_location", "=", "_sanitize_path", "(", "info", ".", "location", ")", "external_data_file_path", "=", "os", ".", "path", ".", "join", "(", "base_dir", ",", "file_location", ")", "with", "open", "(", "external_data_file_path", ",", "'rb'", ")", "as", "data_file", ":", "if", "info", ".", "offset", ":", "data_file", ".", "seek", "(", "info", ".", "offset", ")", "if", "info", ".", "length", ":", "tensor", ".", "raw_data", "=", "data_file", ".", "read", "(", "info", ".", "length", ")", "else", ":", "tensor", ".", "raw_data", "=", "data_file", ".", "read", "(", ")" ]
initiate a new connection to a remote socket bound to an address
def connect(self, address):
    """Initiate a new connection to a remote socket bound to *address*.

    Resolves the address, then retries the non-blocking ``connect_ex``
    until it either succeeds, fails with a hard error, or times out while
    waiting for the socket to become writable.

    :param address: the ``(host, port)`` pair to connect to
    :raises socket.error: when the connect fails with a non-retryable error
    :raises socket.timeout: when the configured timeout elapses
    :raises IOError: with EINTR when the scheduler was interrupted
    """
    address = _dns_resolve(self, address)
    # Register interest in write-readiness for the duration of the attempt.
    with self._registered('we'):
        while 1:
            err = self._sock.connect_ex(address)
            # Leave the loop unless we are blocking and the error code says
            # the connect is still in progress.
            if not self._blocking or err not in _BLOCKING_OP:
                # EISCONN means an earlier iteration already connected us.
                if err not in (0, errno.EISCONN):
                    raise socket.error(err, errno.errorcode[err])
                return
            # Block until the socket is writable (connect finished) or the
            # timeout fires; wait() returning truthy signals the timeout.
            if self._writable.wait(self.gettimeout()):
                raise socket.timeout("timed out")
            if scheduler.state.interrupted:
                raise IOError(errno.EINTR, "interrupted system call")
6,999
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/sockets.py#L150-L172
[ "def", "populateFromDirectory", "(", "self", ",", "vcfDirectory", ")", ":", "pattern", "=", "os", ".", "path", ".", "join", "(", "vcfDirectory", ",", "\"*.vcf.gz\"", ")", "dataFiles", "=", "[", "]", "indexFiles", "=", "[", "]", "for", "vcfFile", "in", "glob", ".", "glob", "(", "pattern", ")", ":", "dataFiles", ".", "append", "(", "vcfFile", ")", "indexFiles", ".", "append", "(", "vcfFile", "+", "\".tbi\"", ")", "self", ".", "populateFromFile", "(", "dataFiles", ",", "indexFiles", ")" ]