query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Reconnect to JLigier and subscribe to the tags .
def _reconnect(self):
    """Re-establish the JLigier connection and restore tag subscriptions.

    Tears the current connection down first, then reconnects and
    re-applies the subscription set.  The order matters: subscriptions
    can only be updated on a live connection.
    """
    log.debug("Reconnecting to JLigier...")
    self._disconnect()
    self._connect()
    self._update_subscriptions()
10,100
https://github.com/tamasgal/km3pipe/blob/7a9b59ac899a28775b5bdc5d391d9a5340d08040/km3pipe/controlhost.py#L136-L141
[ "def", "prepare_request", "(", "self", ",", "kwargs", ")", ":", "if", "self", ".", "session_id", "is", "not", "None", ":", "actual", "=", "dt", ".", "datetime", ".", "now", "(", ")", "if", "(", "self", ".", "last_activity", "+", "self", ".", "inactivity_limit", ")", "<", "actual", ":", "# session expired", "if", "self", ".", "persist", ":", "# request new session", "self", ".", "create_session", "(", ")", "actual", "=", "dt", ".", "datetime", ".", "now", "(", ")", "else", ":", "# raise error and recommend to manualy refresh session", "raise", "RuntimeError", "(", "'A non Persistent Session is expired. '", "'For consistency reasons this exception is raised. '", "'Please try again with manual refresh of the session '", ")", "self", ".", "last_activity", "=", "actual", "headers", "=", "kwargs", ".", "get", "(", "'headers'", ")", "if", "headers", "is", "None", ":", "kwargs", "[", "'headers'", "]", "=", "headers", "=", "{", "}", "headers", "[", "'workbook-session-id'", "]", "=", "self", ".", "session_id" ]
Set the byte data and fill up the bytes to fit the size .
def data(self, value):
    """Set the byte payload, zero-padding it to exactly ``self.SIZE`` bytes.

    Parameters
    ----------
    value : bytes or None
        Raw tag data; any falsy value is treated as an empty payload.

    Raises
    ------
    ValueError
        If ``value`` is longer than ``self.SIZE``.
    """
    if not value:
        value = b''
    if len(value) > self.SIZE:
        raise ValueError("The maximum tag size is {0}".format(self.SIZE))
    # ljust pads in one C-level step instead of appending b'\x00'
    # one byte at a time in a Python loop (the original was O(n^2)).
    self._data = value.ljust(self.SIZE, b'\x00')
10,101
https://github.com/tamasgal/km3pipe/blob/7a9b59ac899a28775b5bdc5d391d9a5340d08040/km3pipe/controlhost.py#L177-L185
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Set values in constant
def add(self, name, attr=None, value=None):
    """Register a constant, deriving ``attr``/``value`` from ``name`` when omitted.

    A tuple or list ``name`` is unpacked via ``__set_iter_value``.  The
    generated constant string is appended to the internal data tuple and
    also exposed as a slugified attribute on the instance.
    """
    if isinstance(name, (tuple, list)):
        name, attr, value = self.__set_iter_value(name)
    attr = name if attr is None else attr
    value = attr if value is None else value
    self.__data += (self.get_const_string(name=name, value=value),)
    # set attribute as slugify
    self.__dict__[s_attr(attr)] = self.__data[-1]
10,102
https://github.com/valdergallo/pyconst/blob/af4cbc8d91ffab601ac5e45e5480f20c5462064d/pyconst/const.py#L54-L68
[ "def", "namer", "(", "cls", ",", "imageUrl", ",", "pageUrl", ")", ":", "start", "=", "''", "tsmatch", "=", "compile", "(", "r'/(\\d+)-'", ")", ".", "search", "(", "imageUrl", ")", "if", "tsmatch", ":", "start", "=", "datetime", ".", "utcfromtimestamp", "(", "int", "(", "tsmatch", ".", "group", "(", "1", ")", ")", ")", ".", "strftime", "(", "\"%Y-%m-%d\"", ")", "else", ":", "# There were only chapter 1, page 4 and 5 not matching when writing", "# this...", "start", "=", "'2015-04-11x'", "return", "start", "+", "\"-\"", "+", "pageUrl", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "-", "1", "]" ]
Start IOLoop in daemonized thread .
def start(self):
    """Launch the IOLoop in a daemonized background thread.

    Blocks until the loop thread signals readiness via ``self._ready``.
    Calling start() twice is a programming error (asserted).
    """
    assert self._thread is None, 'thread already started'
    # build and configure the worker before launching it
    worker = Thread(target=self._start_io_loop)
    worker.daemon = True
    self._thread = worker
    worker.start()
    # wait here until _start_io_loop marks the loop as ready
    self._ready.wait()
10,103
https://github.com/breerly/threadloop/blob/9d4f83660fd79fe15d741b831be9ee28dccacc30/threadloop/threadloop.py#L56-L66
[ "def", "source_start", "(", "base", "=", "''", ",", "book_id", "=", "'book'", ")", ":", "repo_htm_path", "=", "\"{book_id}-h/{book_id}-h.htm\"", ".", "format", "(", "book_id", "=", "book_id", ")", "possible_paths", "=", "[", "\"book.asciidoc\"", ",", "repo_htm_path", ",", "\"{}-0.txt\"", ".", "format", "(", "book_id", ")", ",", "\"{}-8.txt\"", ".", "format", "(", "book_id", ")", ",", "\"{}.txt\"", ".", "format", "(", "book_id", ")", ",", "\"{}-pdf.pdf\"", ".", "format", "(", "book_id", ")", ",", "]", "# return the first match", "for", "path", "in", "possible_paths", ":", "fullpath", "=", "os", ".", "path", ".", "join", "(", "base", ",", "path", ")", "if", "os", ".", "path", ".", "exists", "(", "fullpath", ")", ":", "return", "path", "return", "None" ]
Start IOLoop then set ready threading . Event .
def _start_io_loop(self):
    """Run the IOLoop, setting ``self._ready`` once the loop is live.

    The readiness callback only fires from inside the running loop, so
    waiters on ``self._ready`` know the loop is actually serving.
    """
    if not self._io_loop:
        # lazily create a loop if none was injected
        self._io_loop = ioloop.IOLoop()
    # bound method replaces the original inner closure; same effect
    self._io_loop.add_callback(self._ready.set)
    self._io_loop.start()
10,104
https://github.com/breerly/threadloop/blob/9d4f83660fd79fe15d741b831be9ee28dccacc30/threadloop/threadloop.py#L68-L78
[ "def", "normalize_filename", "(", "filename", ")", ":", "# if the url pointed to a directory then just replace all the special chars", "filename", "=", "re", ".", "sub", "(", "\"/|\\\\|;|:|\\?|=\"", ",", "\"_\"", ",", "filename", ")", "if", "len", "(", "filename", ")", ">", "150", ":", "prefix", "=", "hashlib", ".", "md5", "(", "filename", ")", ".", "hexdigest", "(", ")", "filename", "=", "prefix", "+", "filename", "[", "-", "140", ":", "]", "return", "filename" ]
Is thread & ioloop ready .
def is_ready(self):
    """Return True once the worker thread exists and the ready event is set.

    Replaces the manual two-branch if-chain with one boolean expression;
    the result is still a plain ``bool``.
    """
    return bool(self._thread) and self._ready.is_set()
10,105
https://github.com/breerly/threadloop/blob/9d4f83660fd79fe15d741b831be9ee28dccacc30/threadloop/threadloop.py#L80-L91
[ "def", "verse_lookup", "(", "self", ",", "book_name", ",", "book_chapter", ",", "verse", ",", "cache_chapter", "=", "True", ")", ":", "verses_list", "=", "self", ".", "get_chapter", "(", "book_name", ",", "str", "(", "book_chapter", ")", ",", "cache_chapter", "=", "cache_chapter", ")", "return", "verses_list", "[", "int", "(", "verse", ")", "-", "1", "]" ]
Submit Tornado Coroutine to IOLoop in daemonized thread .
def submit(self, fn, *args, **kwargs):
    """Submit ``fn(*args, **kwargs)`` to run on the IOLoop thread.

    Returns a Future (presumably ``concurrent.futures.Future`` — confirm
    against the file's imports) that is resolved with the call's result
    or exception.  Raises ThreadNotStartedError if start() has not
    completed yet.
    """
    if not self.is_ready():
        raise ThreadNotStartedError(
            "The thread has not been started yet, "
            "make sure you call start() first"
        )
    future = Future()

    def execute():
        """Executes fn on the IOLoop."""
        try:
            # maybe_future wraps plain return values so both coroutines
            # and ordinary callables flow through the same path
            result = gen.maybe_future(fn(*args, **kwargs))
        except Exception:
            # The function we ran didn't return a future and instead raised
            # an exception. Let's pretend that it returned this dummy
            # future with our stack trace.
            f = gen.Future()
            f.set_exc_info(sys.exc_info())
            on_done(f)
        else:
            result.add_done_callback(on_done)

    def on_done(f):
        """Sets tornado.Future results to the concurrent.Future."""
        if not f.exception():
            future.set_result(f.result())
            return
        # if f is a tornado future, then it has exc_info()
        if hasattr(f, 'exc_info'):
            exception, traceback = f.exc_info()[1:]
        # else it's a concurrent.future
        else:
            # python2's concurrent.future has exception_info()
            if hasattr(f, 'exception_info'):
                exception, traceback = f.exception_info()
            # python3's concurrent.future just has exception()
            else:
                exception = f.exception()
                traceback = None
        # python2 needs exc_info set explicitly
        if _FUTURE_HAS_EXC_INFO:
            future.set_exception_info(exception, traceback)
            return
        # python3 just needs the exception, exc_info works fine
        future.set_exception(exception)

    # hop onto the loop thread; the caller gets the future immediately
    self._io_loop.add_callback(execute)
    return future
10,106
https://github.com/breerly/threadloop/blob/9d4f83660fd79fe15d741b831be9ee28dccacc30/threadloop/threadloop.py#L98-L160
[ "def", "subdir_findall", "(", "dir", ",", "subdir", ")", ":", "strip_n", "=", "len", "(", "dir", ".", "split", "(", "'/'", ")", ")", "path", "=", "'/'", ".", "join", "(", "(", "dir", ",", "subdir", ")", ")", "return", "[", "'/'", ".", "join", "(", "s", ".", "split", "(", "'/'", ")", "[", "strip_n", ":", "]", ")", "for", "s", "in", "setuptools", ".", "findall", "(", "path", ")", "]" ]
Return peak memory usage in MB
def peak_memory_usage():
    """Return this process's peak (high-water-mark) memory usage in MB.

    On Windows, uses psutil's ``peak_wset`` (bytes).  Elsewhere, uses
    ``resource.getrusage``; ``ru_maxrss`` is reported in kilobytes on
    Linux but in bytes on macOS, hence the platform-dependent divisor.
    The original ``factor_mb = 1 / 1024`` indirection silently becomes
    integer 0 under Python 2 division — direct division avoids that trap.
    """
    if sys.platform.startswith('win'):
        p = psutil.Process()
        return p.memory_info().peak_wset / 1024 / 1024
    mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    if sys.platform == 'darwin':
        # ru_maxrss is in bytes on macOS
        return mem / (1024 * 1024)
    # ru_maxrss is in kilobytes on Linux
    return mem / 1024
10,107
https://github.com/tamasgal/km3pipe/blob/7a9b59ac899a28775b5bdc5d391d9a5340d08040/km3pipe/sys.py#L41-L51
[ "def", "_read_columns_file", "(", "f", ")", ":", "try", ":", "columns", "=", "json", ".", "loads", "(", "open", "(", "f", ",", "'r'", ")", ".", "read", "(", ")", ",", "object_pairs_hook", "=", "collections", ".", "OrderedDict", ")", "except", "Exception", "as", "err", ":", "raise", "InvalidColumnsFileError", "(", "\"There was an error while reading {0}: {1}\"", ".", "format", "(", "f", ",", "err", ")", ")", "# Options are not supported yet:", "if", "'__options'", "in", "columns", ":", "del", "columns", "[", "'__options'", "]", "return", "columns" ]
Returns a list of the number of times each preference is given .
def getPreferenceCounts(self):
    """Return a list with the multiplicity (count) of each preference order.

    :return: list of ints, one per Preference, in storage order.
    """
    # comprehension replaces the manual append loop
    return [preference.count for preference in self.preferences]
10,108
https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L64-L72
[ "def", "parse_message", "(", "message", ",", "nodata", "=", "False", ")", ":", "header", "=", "read_machine_header", "(", "message", ")", "h_len", "=", "__get_machine_header_length", "(", "header", ")", "meta_raw", "=", "message", "[", "h_len", ":", "h_len", "+", "header", "[", "'meta_len'", "]", "]", "meta", "=", "__parse_meta", "(", "meta_raw", ",", "header", ")", "data_start", "=", "h_len", "+", "header", "[", "'meta_len'", "]", "data", "=", "b''", "if", "not", "nodata", ":", "data", "=", "__decompress", "(", "meta", ",", "message", "[", "data_start", ":", "data_start", "+", "header", "[", "'data_len'", "]", "]", ")", "return", "header", ",", "meta", ",", "data" ]
Returns a list of dictionaries one for each preference that associates the integer representation of each candidate with its position in the ranking starting from 1 and returns a list of the number of times each preference is given .
def getRankMaps(self):
    """Return each preference's rank map (candidate -> position, 1-based).

    :return: list of dicts, one per Preference, in storage order.
    """
    # comprehension replaces the manual append loop
    return [preference.getRankMap() for preference in self.preferences]
10,109
https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L74-L84
[ "def", "area", "(", "self", ")", ":", "mprop", "=", "vtk", ".", "vtkMassProperties", "(", ")", "mprop", ".", "SetInputData", "(", "self", ")", "return", "mprop", ".", "GetSurfaceArea", "(", ")" ]
Returns a list of dictionaries one for each preference that associates each position in the ranking with a list of integer representations of the candidates ranked at that position and returns a list of the number of times each preference is given .
def getReverseRankMaps(self):
    """Return each preference's reverse rank map (position -> candidates).

    :return: list of dicts, one per Preference, in storage order.
    """
    # comprehension replaces the manual append loop
    return [preference.getReverseRankMap() for preference in self.preferences]
10,110
https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L85-L95
[ "def", "_filter_schemas", "(", "schemas", ",", "schema_tables", ",", "exclude_table_columns", ")", ":", "return", "[", "_filter_schema", "(", "s", ",", "schema_tables", ",", "exclude_table_columns", ")", "for", "s", "in", "schemas", "]" ]
Exports a preflib format file that contains all the information of the current Profile .
def exportPreflibFile(self, fileName):
    """Write this Profile to ``fileName`` in preflib format.

    Only soc/toc/soi/toi election types are supported; any other type
    prints an error and calls exit().  Output layout: candidate count,
    one "int,name" line per candidate, a "voters,voteCount,uniqueOrders"
    line, then one line per unique preference with tied candidates
    wrapped in braces.
    """
    elecType = self.getElecType()
    if elecType != "soc" and elecType != "toc" and elecType != "soi" and elecType != "toi":
        print("ERROR: printing current type to preflib format is not supported")
        exit()
    # Generate a list of reverse rankMaps, one for each vote. This will allow us to easiliy
    # identify ties.
    reverseRankMaps = self.getReverseRankMaps()
    outfileObj = open(fileName, 'w')
    # Print the number of candidates and the integer representation and name of each candidate.
    outfileObj.write(str(self.numCands))
    for candInt, cand in self.candMap.items():
        outfileObj.write("\n" + str(candInt) + "," + cand)
    # Sum up the number of preferences that are represented.
    preferenceCount = 0
    for preference in self.preferences:
        preferenceCount += preference.count
    # Print the number of voters, the sum of vote count, and the number of unique orders.
    outfileObj.write("\n" + str(self.numVoters) + "," + str(preferenceCount) + "," + str(len(self.preferences)))
    for i in range(0, len(reverseRankMaps)):
        # First, print the number of times the preference appears.
        outfileObj.write("\n" + str(self.preferences[i].count))
        reverseRankMap = reverseRankMaps[i]
        # We sort the positions in increasing order and print the candidates at each position
        # in order.
        sortedKeys = sorted(reverseRankMap.keys())
        for key in sortedKeys:
            cands = reverseRankMap[key]
            # If only one candidate is in a particular position, we assume there is no tie.
            if len(cands) == 1:
                outfileObj.write("," + str(cands[0]))
            # If more than one candidate is in a particular position, they are tied. We print
            # brackets around the candidates.
            elif len(cands) > 1:
                outfileObj.write(",{" + str(cands[0]))
                for j in range(1, len(cands)):
                    outfileObj.write("," + str(cands[j]))
                outfileObj.write("}")
    outfileObj.close()
10,111
https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L201-L259
[ "def", "update_sandbox_product", "(", "self", ",", "product_id", ",", "surge_multiplier", "=", "None", ",", "drivers_available", "=", "None", ",", ")", ":", "args", "=", "{", "'surge_multiplier'", ":", "surge_multiplier", ",", "'drivers_available'", ":", "drivers_available", ",", "}", "endpoint", "=", "'v1.2/sandbox/products/{}'", ".", "format", "(", "product_id", ")", "return", "self", ".", "_api_call", "(", "'PUT'", ",", "endpoint", ",", "args", "=", "args", ")" ]
Imports a preflib format file that contains all the information of a Profile . This function will completely override all members of the current Profile object . Currently we assume that in an election where incomplete ordering are allowed if a voter ranks only one candidate then the voter did not prefer any candidates over another . This may lead to some discrepancies when importing and exporting a . toi preflib file or a . soi preflib file .
def importPreflibFile(self, fileName):
    """Load a Profile from a preflib file, replacing all current members.

    NOTE(review): per the module's own description, a voter who ranks a
    single candidate is treated as expressing no pairwise preferences,
    which may cause discrepancies on .toi/.soi round-trips — confirm
    against genWmgMapFromRankMap.
    """
    # Use the functionality found in io to read the file.
    elecFileObj = open(fileName, 'r')
    self.candMap, rankMaps, wmgMapsCounts, self.numVoters = prefpy_io.read_election_file(elecFileObj)
    elecFileObj.close()
    self.numCands = len(self.candMap.keys())
    # Go through the rankMaps and generate a wmgMap for each vote. Use the wmgMap to create a
    # Preference object.
    self.preferences = []
    for i in range(0, len(rankMaps)):
        wmgMap = self.genWmgMapFromRankMap(rankMaps[i])
        self.preferences.append(Preference(wmgMap, wmgMapsCounts[i]))
10,112
https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L261-L284
[ "def", "parse_content", "(", "self", ",", "content", ")", ":", "self", ".", "lines", "=", "content", "for", "scanner", "in", "self", ".", "scanners", ":", "scanner", "(", "self", ")" ]
Exports a json file that contains all the information of the current Profile .
def exportJsonFile(self, fileName):
    """Serialize this Profile to ``fileName`` as JSON.

    Profile and Preference are not directly JSON serializable, so the
    underlying ``__dict__`` contents are exported instead: every Profile
    attribute except ``preferences``, plus one plain dict per Preference.
    """
    # Copy every attribute except the Preference objects themselves.
    data = {key: val for key, val in self.__dict__.items() if key != "preferences"}
    # Export each Preference as its plain attribute dictionary.
    data["preferences"] = [dict(preference.__dict__) for preference in self.preferences]
    # with-statement guarantees the file is closed even if dump fails
    # (the original leaked the handle on exception).
    with open(fileName, 'w') as outfile:
        json.dump(data, outfile)
10,113
https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L286-L312
[ "async", "def", "_unsubscribe", "(", "self", ",", "channels", ",", "is_mask", ")", ":", "vanished", "=", "[", "]", "if", "channels", ":", "for", "channel", "in", "channels", ":", "key", "=", "channel", ",", "is_mask", "self", ".", "_channels", ".", "remove", "(", "key", ")", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", ".", "remove", "(", "self", ".", "_queue", ")", "if", "not", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", ":", "# we were last sub?", "vanished", ".", "append", "(", "channel", ")", "del", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", "else", ":", "while", "self", ".", "_channels", ":", "channel", ",", "is_mask", "=", "key", "=", "self", ".", "_channels", ".", "pop", "(", ")", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", ".", "remove", "(", "self", ".", "_queue", ")", "if", "not", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", ":", "vanished", ".", "append", "(", "channel", ")", "del", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", "if", "vanished", ":", "await", "getattr", "(", "self", ".", "_sub", ",", "'punsubscribe'", "if", "is_mask", "else", "'unsubscribe'", ")", "(", "vanished", ")" ]
Imports a json file that contains all the information of a Profile . This function will completely override all members of the current Profile object .
def importJsonFile(self, fileName):
    """Load a Profile from a JSON file, replacing all current members.

    json.load yields unicode strings throughout, so candidate-map keys
    are coerced back to int (values to ascii byte strings, matching the
    original's Python-2-era behavior) and every wmgMap key/value is
    coerced back to int before rebuilding the Preference objects.
    """
    with open(fileName) as infile:
        data = json.load(infile)
    self.numCands = int(data["numCands"])
    self.numVoters = int(data["numVoters"])
    # restore integer keys / ascii values of the candidate map
    self.candMap = {int(key): data["candMap"][key].encode("ascii")
                    for key in data["candMap"].keys()}
    # rebuild each Preference from its exported attribute dictionary
    self.preferences = []
    for preferenceMap in data["preferences"]:
        exported = preferenceMap["wmgMap"]
        wmgMap = {int(outer): {int(inner): int(exported[outer][inner])
                               for inner in exported[outer].keys()}
                  for outer in exported.keys()}
        self.preferences.append(Preference(wmgMap, int(preferenceMap["count"])))
10,114
https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/profile.py#L314-L353
[ "def", "create_software_renderer", "(", "self", ",", "surface", ")", ":", "renderer", "=", "object", ".", "__new__", "(", "Renderer", ")", "renderer", ".", "_ptr", "=", "self", ".", "_ptr", "=", "check_ptr_err", "(", "lib", ".", "SDL_CreateSoftwareRenderer", "(", "surface", ".", "_ptr", ")", ")", "return", "renderer" ]
Driver function for the computation of the MM algorithm
def main():
    """Driver: run the MM aggregator on a tiny example and sanity-check it."""
    # test example below taken from GMMRA by Azari, Chen, Parkes, & Xia
    candidates = [0, 1, 2]
    votes = [[0, 1, 2], [1, 2, 0]]
    aggregator = MMPLAggregator(candidates)
    gamma = aggregator.aggregate(votes, epsilon=1e-7, max_iters=20)
    print(aggregator.alts_to_ranks, aggregator.ranks_to_alts)
    rankings = [aggregator.get_ranking(alt) for alt in candidates]
    assert rankings == [1, 0, 2]
    print(gamma)
10,115
https://github.com/PrefPy/prefpy/blob/f395ba3782f05684fa5de0cece387a6da9391d02/prefpy/mmgbtl.py#L73-L84
[ "def", "get_context_data", "(", "self", ",", "*", "*", "kwargs", ")", ":", "context", "=", "super", "(", "TabView", ",", "self", ")", ".", "get_context_data", "(", "*", "*", "kwargs", ")", "try", ":", "tab_group", "=", "self", ".", "get_tabs", "(", "self", ".", "request", ",", "*", "*", "kwargs", ")", "context", "[", "\"tab_group\"", "]", "=", "tab_group", "# Make sure our data is pre-loaded to capture errors.", "context", "[", "\"tab_group\"", "]", ".", "load_tab_data", "(", ")", "except", "Exception", ":", "exceptions", ".", "handle", "(", "self", ".", "request", ")", "return", "context" ]
Generates and returns URL for redirecting to Login Page of RunKeeper which is the Authorization Endpoint of Health Graph API .
def get_login_url(self, state=None):
    """Build the RunKeeper authorization-endpoint URL for the login redirect.

    :param state: optional opaque value echoed back by the provider;
        omitted from the query string when None.
    :return: full authorization URL with the encoded query string.
    """
    params = {
        'response_type': 'code',
        'client_id': self._client_id,
        'redirect_uri': self._redirect_uri,
    }
    if state is not None:
        params['state'] = state
    # NOTE(review): urllib.urlencode is the Python 2 spelling
    # (urllib.parse.urlencode in Python 3) — kept for behavior parity.
    return "%s?%s" % (settings.API_AUTHORIZATION_URL, urllib.urlencode(params))
10,116
https://github.com/aouyar/healthgraph-api/blob/fc5135ab353ca1f05e8a70ec784ff921e686c072/healthgraph/authmgr.py#L47-L62
[ "def", "diff_sizes", "(", "a", ",", "b", ",", "progressbar", "=", "None", ")", ":", "difference", "=", "[", "]", "for", "i", "in", "a", ".", "identifiers", ":", "a_size", "=", "a", ".", "item_properties", "(", "i", ")", "[", "\"size_in_bytes\"", "]", "b_size", "=", "b", ".", "item_properties", "(", "i", ")", "[", "\"size_in_bytes\"", "]", "if", "a_size", "!=", "b_size", ":", "difference", ".", "append", "(", "(", "i", ",", "a_size", ",", "b_size", ")", ")", "if", "progressbar", ":", "progressbar", ".", "update", "(", "1", ")", "return", "difference" ]
Return URL for image used for RunKeeper Login button .
def get_login_button_url(self, button_color=None, caption_color=None, button_size=None):
    """Return the image URL for the RunKeeper login button.

    Invalid colors fall back to the first entry of the corresponding
    settings list; an unknown size falls back to the 'None' size entry.
    """
    if button_color not in settings.LOGIN_BUTTON_COLORS:
        button_color = settings.LOGIN_BUTTON_COLORS[0]
    if caption_color not in settings.LOGIN_BUTTON_CAPTION_COLORS:
        caption_color = settings.LOGIN_BUTTON_CAPTION_COLORS[0]
    # dict.has_key() was removed in Python 3; the `in` operator is the
    # backward-compatible equivalent.
    if button_size in settings.LOGIN_BUTTON_SIZES:
        button_size = settings.LOGIN_BUTTON_SIZES[button_size]
    else:
        button_size = settings.LOGIN_BUTTON_SIZES['None']
    return settings.LOGIN_BUTTON_URL % (button_color, caption_color, button_size)
10,117
https://github.com/aouyar/healthgraph-api/blob/fc5135ab353ca1f05e8a70ec784ff921e686c072/healthgraph/authmgr.py#L64-L86
[ "def", "uunion1d", "(", "arr1", ",", "arr2", ")", ":", "v", "=", "np", ".", "union1d", "(", "arr1", ",", "arr2", ")", "v", "=", "_validate_numpy_wrapper_units", "(", "v", ",", "[", "arr1", ",", "arr2", "]", ")", "return", "v" ]
Returns Access Token retrieved from the Health Graph API Token Endpoint following the login to RunKeeper .
def get_access_token(self, code):
    """Exchange an authorization *code* for an access token.

    POSTs to the Health Graph Token Endpoint and returns the
    'access_token' field of the JSON response (None if absent).
    """
    payload = {
        'grant_type': 'authorization_code',
        'code': code,
        'client_id': self._client_id,
        'client_secret': self._client_secret,
        'redirect_uri': self._redirect_uri,
    }
    response = requests.post(settings.API_ACCESS_TOKEN_URL, data=payload)
    return response.json().get('access_token')
10,118
https://github.com/aouyar/healthgraph-api/blob/fc5135ab353ca1f05e8a70ec784ff921e686c072/healthgraph/authmgr.py#L88-L105
[ "def", "broadcast_indices", "(", "x", ",", "minv", ",", "ndim", ",", "axis", ")", ":", "ret", "=", "[", "]", "for", "dim", "in", "range", "(", "ndim", ")", ":", "if", "dim", "==", "axis", ":", "ret", ".", "append", "(", "minv", ")", "else", ":", "broadcast_slice", "=", "[", "np", ".", "newaxis", "]", "*", "ndim", "broadcast_slice", "[", "dim", "]", "=", "slice", "(", "None", ")", "dim_inds", "=", "np", ".", "arange", "(", "x", ".", "shape", "[", "dim", "]", ")", "ret", ".", "append", "(", "dim_inds", "[", "tuple", "(", "broadcast_slice", ")", "]", ")", "return", "tuple", "(", "ret", ")" ]
Revokes the Access Token by accessing the De - authorization Endpoint of Health Graph API .
def revoke_access_token(self, access_token):
    """POST to the Health Graph de-authorization endpoint to revoke *access_token*.

    The HTTP response is intentionally not inspected — revocation is
    best-effort, matching the original behavior; the unused ``req``
    binding has been dropped.
    """
    payload = {'access_token': access_token}
    requests.post(settings.API_DEAUTHORIZATION_URL, data=payload)
10,119
https://github.com/aouyar/healthgraph-api/blob/fc5135ab353ca1f05e8a70ec784ff921e686c072/healthgraph/authmgr.py#L107-L115
[ "def", "broadcast", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", ".", "dataarray", "import", "DataArray", "from", ".", "dataset", "import", "Dataset", "exclude", "=", "kwargs", ".", "pop", "(", "'exclude'", ",", "None", ")", "if", "exclude", "is", "None", ":", "exclude", "=", "set", "(", ")", "if", "kwargs", ":", "raise", "TypeError", "(", "'broadcast() got unexpected keyword arguments: %s'", "%", "list", "(", "kwargs", ")", ")", "args", "=", "align", "(", "*", "args", ",", "join", "=", "'outer'", ",", "copy", "=", "False", ",", "exclude", "=", "exclude", ")", "common_coords", "=", "OrderedDict", "(", ")", "dims_map", "=", "OrderedDict", "(", ")", "for", "arg", "in", "args", ":", "for", "dim", "in", "arg", ".", "dims", ":", "if", "dim", "not", "in", "common_coords", "and", "dim", "not", "in", "exclude", ":", "dims_map", "[", "dim", "]", "=", "arg", ".", "sizes", "[", "dim", "]", "if", "dim", "in", "arg", ".", "coords", ":", "common_coords", "[", "dim", "]", "=", "arg", ".", "coords", "[", "dim", "]", ".", "variable", "def", "_set_dims", "(", "var", ")", ":", "# Add excluded dims to a copy of dims_map", "var_dims_map", "=", "dims_map", ".", "copy", "(", ")", "for", "dim", "in", "exclude", ":", "with", "suppress", "(", "ValueError", ")", ":", "# ignore dim not in var.dims", "var_dims_map", "[", "dim", "]", "=", "var", ".", "shape", "[", "var", ".", "dims", ".", "index", "(", "dim", ")", "]", "return", "var", ".", "set_dims", "(", "var_dims_map", ")", "def", "_broadcast_array", "(", "array", ")", ":", "data", "=", "_set_dims", "(", "array", ".", "variable", ")", "coords", "=", "OrderedDict", "(", "array", ".", "coords", ")", "coords", ".", "update", "(", "common_coords", ")", "return", "DataArray", "(", "data", ",", "coords", ",", "data", ".", "dims", ",", "name", "=", "array", ".", "name", ",", "attrs", "=", "array", ".", "attrs", ")", "def", "_broadcast_dataset", "(", "ds", ")", ":", "data_vars", "=", "OrderedDict", "(", "(", "k", ",", "_set_dims", 
"(", "ds", ".", "variables", "[", "k", "]", ")", ")", "for", "k", "in", "ds", ".", "data_vars", ")", "coords", "=", "OrderedDict", "(", "ds", ".", "coords", ")", "coords", ".", "update", "(", "common_coords", ")", "return", "Dataset", "(", "data_vars", ",", "coords", ",", "ds", ".", "attrs", ")", "result", "=", "[", "]", "for", "arg", "in", "args", ":", "if", "isinstance", "(", "arg", ",", "DataArray", ")", ":", "result", ".", "append", "(", "_broadcast_array", "(", "arg", ")", ")", "elif", "isinstance", "(", "arg", ",", "Dataset", ")", ":", "result", ".", "append", "(", "_broadcast_dataset", "(", "arg", ")", ")", "else", ":", "raise", "ValueError", "(", "'all input must be Dataset or DataArray objects'", ")", "return", "tuple", "(", "result", ")" ]
Splits the list of time intervals in the specified points
def split(self, points):
    """Split the stored time intervals at each of the given points.

    For every point *p*, the first interval with start < p < end is
    replaced in place by the two halves meeting at *p*.  Points that
    fall on an existing boundary (or outside every interval) change
    nothing.  Only one interval is split per point (assumes intervals
    do not overlap — TODO confirm).
    """
    for p in points:
        for i in range(len(self.intervals)):
            # strict inequalities: boundary points never split anything
            if (self.intervals[i].start < p) and (self.intervals[i].end > p):
                # rebuild the list with interval i replaced by its two halves
                self.intervals = (self.intervals[:i]
                                  + [TimeInterval(self.intervals[i].start, p),
                                     TimeInterval(p, self.intervals[i].end)]
                                  + self.intervals[(i + 1):])
                # stop scanning for this point once a split is made
                break
10,120
https://github.com/IRC-SPHERE/HyperStream/blob/98478f4d31ed938f4aa7c958ed0d4c3ffcb2e780/hyperstream/time_interval.py#L118-L135
[ "def", "cublasSgbmv", "(", "handle", ",", "trans", ",", "m", ",", "n", ",", "kl", ",", "ku", ",", "alpha", ",", "A", ",", "lda", ",", "x", ",", "incx", ",", "beta", ",", "y", ",", "incy", ")", ":", "status", "=", "_libcublas", ".", "cublasSgbmv_v2", "(", "handle", ",", "trans", ",", "m", ",", "n", ",", "kl", ",", "ku", ",", "ctypes", ".", "byref", "(", "ctypes", ".", "c_float", "(", "alpha", ")", ")", ",", "int", "(", "A", ")", ",", "lda", ",", "int", "(", "x", ")", ",", "incx", ",", "ctypes", ".", "byref", "(", "ctypes", ".", "c_float", "(", "beta", ")", ")", ",", "int", "(", "y", ")", ",", "incy", ")", "cublasCheckStatus", "(", "status", ")" ]
Create a new Workflow Object with given content .
def create(cls, data, **kwargs):
    """Create a new workflow object holding *data* and add it to the session.

    Extra keyword arguments are forwarded to the underlying db model
    constructor.  Returns the wrapped object; the caller is responsible
    for committing the session.
    """
    with db.session.begin_nested():
        model = cls.dbmodel(**kwargs)
        model.data = data
        instance = cls(model)
        db.session.add(instance.model)
    return instance
10,121
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L139-L146
[ "def", "setGroups", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "requests", "=", "0", "groups", "=", "[", "]", "try", ":", "for", "gk", "in", "self", "[", "'groupKeys'", "]", ":", "try", ":", "g", "=", "self", ".", "mambugroupclass", "(", "entid", "=", "gk", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "AttributeError", "as", "ae", ":", "from", ".", "mambugroup", "import", "MambuGroup", "self", ".", "mambugroupclass", "=", "MambuGroup", "g", "=", "self", ".", "mambugroupclass", "(", "entid", "=", "gk", ",", "*", "args", ",", "*", "*", "kwargs", ")", "requests", "+=", "1", "groups", ".", "append", "(", "g", ")", "except", "KeyError", ":", "pass", "self", "[", "'groups'", "]", "=", "groups", "return", "requests" ]
Return a workflow object from id .
def get(cls, id_):
    """Return the workflow object with the given id.

    :param id_: primary key of the workflow object.
    :raises WorkflowsMissingObject: if no row with ``id_`` exists.
    :return: the wrapped workflow object.
    """
    with db.session.no_autoflush:
        query = cls.dbmodel.query.filter_by(id=id_)
        try:
            model = query.one()
        except NoResultFound:
            # message fixed: original read "No object for for id {0}"
            raise WorkflowsMissingObject("No object for id {0}".format(id_))
        return cls(model)
10,122
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L149-L159
[ "def", "is_registration_possible", "(", "self", ",", "user_info", ")", ":", "return", "self", ".", "get_accessibility", "(", ")", ".", "is_open", "(", ")", "and", "self", ".", "_registration", ".", "is_open", "(", ")", "and", "self", ".", "is_user_accepted_by_access_control", "(", "user_info", ")" ]
Wrap sqlalchemy query methods .
def query(cls, *criteria, **filters):
    """Wrap SQLAlchemy filtering: positional criteria go to ``filter``,
    keyword filters to ``filter_by``; every matching row is returned
    wrapped in ``cls``.
    """
    matches = cls.dbmodel.query.filter(*criteria).filter_by(**filters)
    results = []
    for record in matches.all():
        results.append(cls(record))
    return results
10,123
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L162-L187
[ "def", "handle_input", "(", "self", ")", ":", "difference", "=", "self", ".", "check_state", "(", ")", "if", "not", "difference", ":", "return", "self", ".", "events", "=", "[", "]", "self", ".", "handle_new_events", "(", "difference", ")", "self", ".", "update_timeval", "(", ")", "self", ".", "events", ".", "append", "(", "self", ".", "sync_marker", "(", "self", ".", "timeval", ")", ")", "self", ".", "write_to_pipe", "(", "self", ".", "events", ")" ]
Delete a workflow object .
def delete(self, force=False):
    """Delete this workflow object's row inside a nested transaction.

    :param force: unused here — presumably reserved for cascading /
        forced deletion in callers or subclasses; TODO confirm.
    :raises WorkflowsMissingModel: if no model is attached.
    :return: self, to allow chaining.
    """
    if self.model is None:
        raise WorkflowsMissingModel()
    with db.session.begin_nested():
        db.session.delete(self.model)
    return self
10,124
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L189-L206
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Set the action to be taken for this object .
def set_action ( self , action , message ) : self . extra_data [ "_action" ] = action self . extra_data [ "_message" ] = message
10,125
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L280-L297
[ "def", "external_metadata", "(", "self", ",", "datasource_type", "=", "None", ",", "datasource_id", "=", "None", ")", ":", "if", "datasource_type", "==", "'druid'", ":", "datasource", "=", "ConnectorRegistry", ".", "get_datasource", "(", "datasource_type", ",", "datasource_id", ",", "db", ".", "session", ")", "elif", "datasource_type", "==", "'table'", ":", "database", "=", "(", "db", ".", "session", ".", "query", "(", "Database", ")", ".", "filter_by", "(", "id", "=", "request", ".", "args", ".", "get", "(", "'db_id'", ")", ")", ".", "one", "(", ")", ")", "Table", "=", "ConnectorRegistry", ".", "sources", "[", "'table'", "]", "datasource", "=", "Table", "(", "database", "=", "database", ",", "table_name", "=", "request", ".", "args", ".", "get", "(", "'table_name'", ")", ",", "schema", "=", "request", ".", "args", ".", "get", "(", "'schema'", ")", "or", "None", ",", ")", "external_metadata", "=", "datasource", ".", "external_metadata", "(", ")", "return", "self", ".", "json_response", "(", "external_metadata", ")" ]
Run the workflow specified on the object .
def start_workflow ( self , workflow_name , delayed = False , * * kwargs ) : from . tasks import start if delayed : self . save ( ) db . session . commit ( ) return start . delay ( workflow_name , object_id = self . id , * * kwargs ) else : return start ( workflow_name , data = [ self ] , * * kwargs )
10,126
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L327-L345
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Continue the workflow for this object .
def continue_workflow ( self , start_point = "continue_next" , delayed = False , * * kwargs ) : from . tasks import resume self . save ( ) if not self . id_workflow : raise WorkflowAPIError ( "No workflow associated with object: %r" % ( repr ( self ) , ) ) if delayed : db . session . commit ( ) return resume . delay ( self . id , start_point , * * kwargs ) else : return resume ( self . id , start_point , * * kwargs )
10,127
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L347-L378
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Return dictionary of current task function info for this object .
def get_current_task_info ( self ) : name = self . model . workflow . name if not name : return current_task = workflows [ name ] . workflow for step in self . callback_pos : current_task = current_task [ step ] if callable ( current_task ) : return get_func_info ( current_task )
10,128
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/api.py#L380-L390
[ "def", "delete_classifier", "(", "self", ",", "classifier_id", ",", "*", "*", "kwargs", ")", ":", "if", "classifier_id", "is", "None", ":", "raise", "ValueError", "(", "'classifier_id must be provided'", ")", "headers", "=", "{", "}", "if", "'headers'", "in", "kwargs", ":", "headers", ".", "update", "(", "kwargs", ".", "get", "(", "'headers'", ")", ")", "sdk_headers", "=", "get_sdk_headers", "(", "'watson_vision_combined'", ",", "'V3'", ",", "'delete_classifier'", ")", "headers", ".", "update", "(", "sdk_headers", ")", "params", "=", "{", "'version'", ":", "self", ".", "version", "}", "url", "=", "'/v3/classifiers/{0}'", ".", "format", "(", "*", "self", ".", "_encode_path_vars", "(", "classifier_id", ")", ")", "response", "=", "self", ".", "request", "(", "method", "=", "'DELETE'", ",", "url", "=", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "accept_json", "=", "True", ")", "return", "response" ]
We convert an error code into certain action over start_response and return a WSGI - compliant payload .
def canned_handlers ( self , environ , start_response , code = '200' , headers = [ ] ) : headerbase = [ ( 'Content-Type' , 'text/plain' ) ] if headers : hObj = Headers ( headerbase ) for header in headers : hObj [ header [ 0 ] ] = '; ' . join ( header [ 1 : ] ) start_response ( self . canned_collection [ code ] , headerbase ) return [ '' ]
10,129
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/server/git_http_backend.py#L79-L90
[ "def", "delete_item", "(", "key", ")", ":", "CACHED_KEY_FILE", "=", "os", ".", "path", ".", "join", "(", "CURRENT_DIR", ",", "key", ")", "if", "os", ".", "path", ".", "isfile", "(", "CACHED_KEY_FILE", ")", ":", "os", ".", "remove", "(", "CACHED_KEY_FILE", ")" ]
prints some information about the shell scope
def info_shell_scope ( self ) : Console . ok ( "{:>20} = {:}" . format ( "ECHO" , self . echo ) ) Console . ok ( "{:>20} = {:}" . format ( "DEBUG" , self . debug ) ) Console . ok ( "{:>20} = {:}" . format ( "LOGLEVEL" , self . loglevel ) ) Console . ok ( "{:>20} = {:}" . format ( "SCOPE" , self . active_scope ) ) Console . ok ( "{:>20} = {:}" . format ( "SCOPES" , self . scopes ) ) Console . ok ( "{:>20} = {:}" . format ( "SCOPELESS" , self . scopeless ) ) Console . ok ( "{:>20} = {:}" . format ( "prompt" , self . prompt ) ) Console . ok ( "{:>20} = {:}" . format ( "scripts" , self . scripts ) ) Console . ok ( "{:>20} = {:}" . format ( "variables" , self . variables ) )
10,130
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/shell_scope.py#L28-L38
[ "def", "populateFromDirectory", "(", "self", ",", "vcfDirectory", ")", ":", "pattern", "=", "os", ".", "path", ".", "join", "(", "vcfDirectory", ",", "\"*.vcf.gz\"", ")", "dataFiles", "=", "[", "]", "indexFiles", "=", "[", "]", "for", "vcfFile", "in", "glob", ".", "glob", "(", "pattern", ")", ":", "dataFiles", ".", "append", "(", "vcfFile", ")", "indexFiles", ".", "append", "(", "vcfFile", "+", "\".tbi\"", ")", "self", ".", "populateFromFile", "(", "dataFiles", ",", "indexFiles", ")" ]
activates the shell scope
def activate_shell_scope ( self ) : self . variables = { } self . prompt = 'cm> ' self . active_scope = "" self . scopes = [ ] self . scopeless = [ 'load' , 'info' , 'var' , 'use' , 'quit' , 'q' , 'help' ]
10,131
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/shell_scope.py#L40-L46
[ "def", "readme_verify", "(", ")", ":", "expected", "=", "populate_readme", "(", "REVISION", ",", "RTD_VERSION", ")", "# Actually get the stored contents.", "with", "open", "(", "README_FILE", ",", "\"r\"", ")", "as", "file_obj", ":", "contents", "=", "file_obj", ".", "read", "(", ")", "if", "contents", "!=", "expected", ":", "err_msg", "=", "\"\\n\"", "+", "get_diff", "(", "contents", ",", "expected", ",", "\"README.rst.actual\"", ",", "\"README.rst.expected\"", ")", "raise", "ValueError", "(", "err_msg", ")", "else", ":", "print", "(", "\"README contents are as expected.\"", ")" ]
Generates the stack of functions to call . It looks at the ordered list of all middlewares and only keeps those which have the method we re trying to call .
def _build_stack ( self ) -> List [ Callable ] : stack = [ ] for m in self . manager . middlewares : try : stack . append ( getattr ( m ( self ) , self . name ) ) except AttributeError : pass return stack
10,132
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_manager.py#L53-L68
[ "def", "set_baudrate", "(", "self", ",", "channel", ",", "BTR", ",", "baudarate", ")", ":", "UcanSetBaudrateEx", "(", "self", ".", "_handle", ",", "channel", ",", "BTR", ">>", "8", ",", "BTR", ",", "baudarate", ")" ]
Creates initializes and returns a unique MiddlewareManager instance .
def instance ( cls ) -> 'MiddlewareManager' : if cls . _instance is None : cls . _instance = cls ( ) cls . _instance . init ( ) return cls . _instance
10,133
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_manager.py#L124-L132
[ "def", "get_consumption", "(", "self", ")", ":", "self", ".", "get_status", "(", ")", "try", ":", "self", ".", "consumption", "=", "self", ".", "data", "[", "'power'", "]", "except", "TypeError", ":", "self", ".", "consumption", "=", "0", "return", "self", ".", "consumption" ]
Checks that the configuration makes sense .
def health_check ( cls ) : try : assert isinstance ( settings . MIDDLEWARES , list ) except AssertionError : yield HealthCheckFail ( '00005' , 'The "MIDDLEWARES" configuration key should be assigned ' 'to a list' , ) return for m in settings . MIDDLEWARES : try : c = import_class ( m ) except ( TypeError , ValueError , AttributeError , ImportError ) : yield HealthCheckFail ( '00005' , f'Cannot import middleware "{m}"' , ) else : if not issubclass ( c , BaseMiddleware ) : yield HealthCheckFail ( '00005' , f'Middleware "{m}" does not implement ' f'"BaseMiddleware"' , )
10,134
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_manager.py#L135-L164
[ "async", "def", "update_lease_async", "(", "self", ",", "lease", ")", ":", "if", "lease", "is", "None", ":", "return", "False", "if", "not", "lease", ".", "token", ":", "return", "False", "_logger", ".", "debug", "(", "\"Updating lease %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ")", "# First, renew the lease to make sure the update will go through.", "if", "await", "self", ".", "renew_lease_async", "(", "lease", ")", ":", "try", ":", "await", "self", ".", "host", ".", "loop", ".", "run_in_executor", "(", "self", ".", "executor", ",", "functools", ".", "partial", "(", "self", ".", "storage_client", ".", "create_blob_from_text", ",", "self", ".", "lease_container_name", ",", "lease", ".", "partition_id", ",", "json", ".", "dumps", "(", "lease", ".", "serializable", "(", ")", ")", ",", "lease_id", "=", "lease", ".", "token", ")", ")", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "_logger", ".", "error", "(", "\"Failed to update lease %r %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ",", "err", ")", "raise", "err", "else", ":", "return", "False", "return", "True" ]
Get the function to call which will run all middlewares .
def get ( self , name : Text , final : C ) -> C : # noinspection PyTypeChecker return Caller ( self , name , final )
10,135
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/middleware/_manager.py#L173-L184
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Return a Loci object giving the loci specified on the command line .
def load_from_args ( args ) : if not args . locus : return None loci_iterator = ( Locus . parse ( locus ) for locus in args . locus ) # if args.neighbor_offsets: # loci_iterator = expand_with_neighbors( # loci_iterator, args.neighbor_offsets) return Loci ( loci_iterator )
10,136
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/loci_util.py#L30-L48
[ "def", "_randomize_subject_list", "(", "data_list", ",", "random", ")", ":", "if", "random", "==", "RandomType", ".", "REPRODUCIBLE", ":", "for", "i", "in", "range", "(", "len", "(", "data_list", ")", ")", ":", "_randomize_single_subject", "(", "data_list", "[", "i", "]", ",", "seed", "=", "i", ")", "elif", "random", "==", "RandomType", ".", "UNREPRODUCIBLE", ":", "for", "data", "in", "data_list", ":", "_randomize_single_subject", "(", "data", ")" ]
Creates a string representing the date information provided by the given timestamp object .
def format_date ( cls , timestamp ) : if not timestamp : raise DateTimeFormatterException ( 'timestamp must a valid string {}' . format ( timestamp ) ) return timestamp . strftime ( cls . DATE_FORMAT )
10,137
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L30-L38
[ "def", "removeApplicationManifest", "(", "self", ",", "pchApplicationManifestFullPath", ")", ":", "fn", "=", "self", ".", "function_table", ".", "removeApplicationManifest", "result", "=", "fn", "(", "pchApplicationManifestFullPath", ")", "return", "result" ]
Creates a string representing the date and time information provided by the given timestamp object .
def format_datetime ( cls , timestamp ) : if not timestamp : raise DateTimeFormatterException ( 'timestamp must a valid string {}' . format ( timestamp ) ) return timestamp . strftime ( cls . DATETIME_FORMAT )
10,138
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L41-L49
[ "def", "removeApplicationManifest", "(", "self", ",", "pchApplicationManifestFullPath", ")", ":", "fn", "=", "self", ".", "function_table", ".", "removeApplicationManifest", "result", "=", "fn", "(", "pchApplicationManifestFullPath", ")", "return", "result" ]
Tries to extract a datetime object from the given string expecting date information only .
def extract_date ( cls , date_str ) : if not date_str : raise DateTimeFormatterException ( 'date_str must a valid string {}.' . format ( date_str ) ) try : return cls . _extract_timestamp ( date_str , cls . DATE_FORMAT ) except ( TypeError , ValueError ) : raise DateTimeFormatterException ( 'Invalid date string {}.' . format ( date_str ) )
10,139
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L52-L65
[ "def", "setDefaultApplicationForMimeType", "(", "self", ",", "pchAppKey", ",", "pchMimeType", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setDefaultApplicationForMimeType", "result", "=", "fn", "(", "pchAppKey", ",", "pchMimeType", ")", "return", "result" ]
Tries to extract a datetime object from the given string including time information .
def extract_datetime ( cls , datetime_str ) : if not datetime_str : raise DateTimeFormatterException ( 'datetime_str must a valid string' ) try : return cls . _extract_timestamp ( datetime_str , cls . DATETIME_FORMAT ) except ( TypeError , ValueError ) : raise DateTimeFormatterException ( 'Invalid datetime string {}.' . format ( datetime_str ) )
10,140
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L68-L81
[ "def", "click_action", "(", "self", ")", ":", "cNvPr", "=", "self", ".", "_element", ".", "_nvXxPr", ".", "cNvPr", "return", "ActionSetting", "(", "cNvPr", ",", "self", ")" ]
Tries to extract a datetime object from the given string including only hours .
def extract_datetime_hour ( cls , datetime_str ) : if not datetime_str : raise DateTimeFormatterException ( 'datetime_str must a valid string' ) try : return cls . _extract_timestamp ( datetime_str , cls . DATETIME_HOUR_FORMAT ) except ( TypeError , ValueError ) : raise DateTimeFormatterException ( 'Invalid datetime string {}.' . format ( datetime_str ) )
10,141
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L84-L96
[ "def", "derivativeZ", "(", "self", ",", "mLvl", ",", "pLvl", ",", "MedShk", ")", ":", "xLvl", "=", "self", ".", "xFunc", "(", "mLvl", ",", "pLvl", ",", "MedShk", ")", "dxdShk", "=", "self", ".", "xFunc", ".", "derivativeZ", "(", "mLvl", ",", "pLvl", ",", "MedShk", ")", "dcdx", "=", "self", ".", "cFunc", ".", "derivativeX", "(", "xLvl", ",", "MedShk", ")", "dcdShk", "=", "dxdShk", "*", "dcdx", "+", "self", ".", "cFunc", ".", "derivativeY", "(", "xLvl", ",", "MedShk", ")", "dMeddShk", "=", "(", "dxdShk", "-", "dcdShk", ")", "/", "self", ".", "MedPrice", "return", "dcdShk", ",", "dMeddShk" ]
Tries to extract a datetime object from the given string . First the datetime format is tried if it fails the date format is used for extraction .
def extract ( cls , timestamp_str ) : if not timestamp_str : raise DateTimeFormatterException ( 'timestamp_str must a valid string {}' . format ( timestamp_str ) ) if isinstance ( timestamp_str , ( date , datetime ) ) : return timestamp_str try : return cls . extract_datetime ( timestamp_str ) except DateTimeFormatterException : pass try : return cls . extract_datetime_hour ( timestamp_str ) except DateTimeFormatterException : pass try : return cls . extract_date ( timestamp_str ) except DateTimeFormatterException as e : raise DateTimeFormatterException ( e )
10,142
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/date_formatter.py#L99-L127
[ "def", "save_entity_signal_handler", "(", "sender", ",", "instance", ",", "*", "*", "kwargs", ")", ":", "if", "instance", ".", "__class__", "in", "entity_registry", ".", "entity_registry", ":", "sync_entities", "(", "instance", ")", "if", "instance", ".", "__class__", "in", "entity_registry", ".", "entity_watching", ":", "sync_entities_watching", "(", "instance", ")" ]
Restart the workflow from a given workflow engine UUID .
def restart ( uuid , * * kwargs ) : from . worker_engine import restart_worker return text_type ( restart_worker ( uuid , * * kwargs ) . uuid )
10,143
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/tasks.py#L107-L110
[ "def", "is_registration_possible", "(", "self", ",", "user_info", ")", ":", "return", "self", ".", "get_accessibility", "(", ")", ".", "is_open", "(", ")", "and", "self", ".", "_registration", ".", "is_open", "(", ")", "and", "self", ".", "is_user_accepted_by_access_control", "(", "user_info", ")" ]
Import all submodules and register them in the context namespace .
def import_submodules ( context , root_module , path ) : for _ , module_name , _ in pkgutil . walk_packages ( path , root_module + '.' ) : # this causes a Runtime error with model conflicts # module = loader.find_module(module_name).load_module(module_name) module = __import__ ( module_name , globals ( ) , locals ( ) , [ '__name__' ] ) for k , v in vars ( module ) . items ( ) : if not k . startswith ( '_' ) : context [ k ] = v context [ module_name ] = module
10,144
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/imports.py#L36-L49
[ "def", "merge", "(", "self", ",", "buffer", ",", "other_hyper_log_log", ")", ":", "for", "i", "in", "range", "(", "len", "(", "buffer", ")", ")", ":", "buffer", "[", "i", "]", "=", "max", "(", "buffer", "[", "i", "]", ",", "other_hyper_log_log", "[", "i", "]", ")" ]
A decorator to create a function with docopt arguments . It also generates a help function
def command ( func ) : classname = inspect . getouterframes ( inspect . currentframe ( ) ) [ 1 ] [ 3 ] name = func . __name__ help_name = name . replace ( "do_" , "help_" ) doc = textwrap . dedent ( func . __doc__ ) def new ( instance , args ) : # instance.new.__doc__ = doc try : argv = shlex . split ( args ) arguments = docopt ( doc , help = True , argv = argv ) func ( instance , args , arguments ) except SystemExit : if args not in ( '-h' , '--help' ) : Console . error ( "Could not execute the command." ) print ( doc ) new . __doc__ = doc return new
10,145
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/shell.py#L224-L260
[ "def", "EXCHANGE", "(", "classical_reg1", ",", "classical_reg2", ")", ":", "left", "=", "unpack_classical_reg", "(", "classical_reg1", ")", "right", "=", "unpack_classical_reg", "(", "classical_reg2", ")", "return", "ClassicalExchange", "(", "left", ",", "right", ")" ]
Adds a file to the version
def addFile ( self , path , msg = "" ) : item = Item . from_path ( repo = self . repo , path = path ) self . addItem ( item )
10,146
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L32-L35
[ "def", "get_pulse_s", "(", "self", ")", ":", "try", ":", "dwelltime", "=", "self", ".", "ppg", ".", "dwelltime", ".", "mean", "beam_on", "=", "self", ".", "ppg", ".", "beam_on", ".", "mean", "except", "AttributeError", ":", "raise", "AttributeError", "(", "\"Missing logged ppg parameter: dwelltime \"", "+", "\"or beam_on\"", ")", "return", "dwelltime", "*", "beam_on", "/", "1000." ]
Adds an item if the tree is mutable
def addItem ( self , item ) : try : self . tree . addItem ( item ) except AttributeError , e : raise VersionError ( 'Saved versions are immutable' )
10,147
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L37-L42
[ "def", "list_conversions", "(", "api_key", ",", "api_secret", ",", "video_key", ",", "*", "*", "kwargs", ")", ":", "jwplatform_client", "=", "jwplatform", ".", "Client", "(", "api_key", ",", "api_secret", ")", "logging", ".", "info", "(", "\"Querying for video conversions.\"", ")", "try", ":", "response", "=", "jwplatform_client", ".", "videos", ".", "conversions", ".", "list", "(", "video_key", "=", "video_key", ",", "*", "*", "kwargs", ")", "except", "jwplatform", ".", "errors", ".", "JWPlatformError", "as", "e", ":", "logging", ".", "error", "(", "\"Encountered an error querying for video conversions.\\n{}\"", ".", "format", "(", "e", ")", ")", "sys", ".", "exit", "(", "e", ".", "message", ")", "return", "response" ]
Removes an item if the tree is mutable
def removeItem ( self , item ) : try : self . tree . removeItem ( item ) except AttributeError , e : raise VersionError ( 'Saved versions are immutable' )
10,148
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L44-L49
[ "def", "list_conversions", "(", "api_key", ",", "api_secret", ",", "video_key", ",", "*", "*", "kwargs", ")", ":", "jwplatform_client", "=", "jwplatform", ".", "Client", "(", "api_key", ",", "api_secret", ")", "logging", ".", "info", "(", "\"Querying for video conversions.\"", ")", "try", ":", "response", "=", "jwplatform_client", ".", "videos", ".", "conversions", ".", "list", "(", "video_key", "=", "video_key", ",", "*", "*", "kwargs", ")", "except", "jwplatform", ".", "errors", ".", "JWPlatformError", "as", "e", ":", "logging", ".", "error", "(", "\"Encountered an error querying for video conversions.\\n{}\"", ".", "format", "(", "e", ")", ")", "sys", ".", "exit", "(", "e", ".", "message", ")", "return", "response" ]
Generator that yields Items
def iteritems ( self ) : if self . type in [ 'blob' ] : raise StopIteration for path , mode , sha in self . tree . iteritems ( ) : item = Item ( self , sha , path , mode ) yield item for i in item . iteritems ( ) : yield i
10,149
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L51-L60
[ "def", "mock_xray_client", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "_wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "print", "(", "\"Starting X-Ray Patch\"", ")", "old_xray_context_var", "=", "os", ".", "environ", ".", "get", "(", "'AWS_XRAY_CONTEXT_MISSING'", ")", "os", ".", "environ", "[", "'AWS_XRAY_CONTEXT_MISSING'", "]", "=", "'LOG_ERROR'", "old_xray_context", "=", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_context", "old_xray_emitter", "=", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_emitter", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_context", "=", "AWSContext", "(", ")", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_emitter", "=", "MockEmitter", "(", ")", "try", ":", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "if", "old_xray_context_var", "is", "None", ":", "del", "os", ".", "environ", "[", "'AWS_XRAY_CONTEXT_MISSING'", "]", "else", ":", "os", ".", "environ", "[", "'AWS_XRAY_CONTEXT_MISSING'", "]", "=", "old_xray_context_var", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_emitter", "=", "old_xray_emitter", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_context", "=", "old_xray_context", "return", "_wrapped" ]
Returns set of items .
def items ( self , path = None ) : items = list ( self . iteritems ( ) ) if path is not None : path += '$' regex = re . compile ( path ) items = [ i for i in items if regex . match ( i . path ) ] return items
10,150
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L63-L76
[ "def", "mock_xray_client", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "_wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "print", "(", "\"Starting X-Ray Patch\"", ")", "old_xray_context_var", "=", "os", ".", "environ", ".", "get", "(", "'AWS_XRAY_CONTEXT_MISSING'", ")", "os", ".", "environ", "[", "'AWS_XRAY_CONTEXT_MISSING'", "]", "=", "'LOG_ERROR'", "old_xray_context", "=", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_context", "old_xray_emitter", "=", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_emitter", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_context", "=", "AWSContext", "(", ")", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_emitter", "=", "MockEmitter", "(", ")", "try", ":", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "if", "old_xray_context_var", "is", "None", ":", "del", "os", ".", "environ", "[", "'AWS_XRAY_CONTEXT_MISSING'", "]", "else", ":", "os", ".", "environ", "[", "'AWS_XRAY_CONTEXT_MISSING'", "]", "=", "old_xray_context_var", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_emitter", "=", "old_xray_emitter", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_context", "=", "old_xray_context", "return", "_wrapped" ]
read blob on access only because get_object is slow
def _get_blob ( self ) : if not self . __blob : self . __blob = self . repo . get_object ( self . id ) return self . __blob
10,151
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L124-L128
[ "def", "set_physical_page_for_file", "(", "self", ",", "pageId", ",", "ocrd_file", ",", "order", "=", "None", ",", "orderlabel", "=", "None", ")", ":", "# print(pageId, ocrd_file)", "# delete any page mapping for this file.ID", "for", "el_fptr", "in", "self", ".", "_tree", ".", "getroot", "(", ")", ".", "findall", "(", "'mets:structMap[@TYPE=\"PHYSICAL\"]/mets:div[@TYPE=\"physSequence\"]/mets:div[@TYPE=\"page\"]/mets:fptr[@FILEID=\"%s\"]'", "%", "ocrd_file", ".", "ID", ",", "namespaces", "=", "NS", ")", ":", "el_fptr", ".", "getparent", "(", ")", ".", "remove", "(", "el_fptr", ")", "# find/construct as necessary", "el_structmap", "=", "self", ".", "_tree", ".", "getroot", "(", ")", ".", "find", "(", "'mets:structMap[@TYPE=\"PHYSICAL\"]'", ",", "NS", ")", "if", "el_structmap", "is", "None", ":", "el_structmap", "=", "ET", ".", "SubElement", "(", "self", ".", "_tree", ".", "getroot", "(", ")", ",", "TAG_METS_STRUCTMAP", ")", "el_structmap", ".", "set", "(", "'TYPE'", ",", "'PHYSICAL'", ")", "el_seqdiv", "=", "el_structmap", ".", "find", "(", "'mets:div[@TYPE=\"physSequence\"]'", ",", "NS", ")", "if", "el_seqdiv", "is", "None", ":", "el_seqdiv", "=", "ET", ".", "SubElement", "(", "el_structmap", ",", "TAG_METS_DIV", ")", "el_seqdiv", ".", "set", "(", "'TYPE'", ",", "'physSequence'", ")", "el_pagediv", "=", "el_seqdiv", ".", "find", "(", "'mets:div[@ID=\"%s\"]'", "%", "pageId", ",", "NS", ")", "if", "el_pagediv", "is", "None", ":", "el_pagediv", "=", "ET", ".", "SubElement", "(", "el_seqdiv", ",", "TAG_METS_DIV", ")", "el_pagediv", ".", "set", "(", "'TYPE'", ",", "'page'", ")", "el_pagediv", ".", "set", "(", "'ID'", ",", "pageId", ")", "if", "order", ":", "el_pagediv", ".", "set", "(", "'ORDER'", ",", "order", ")", "if", "orderlabel", ":", "el_pagediv", ".", "set", "(", "'ORDERLABEL'", ",", "orderlabel", ")", "el_fptr", "=", "ET", ".", "SubElement", "(", "el_pagediv", ",", "TAG_METS_FPTR", ")", "el_fptr", ".", "set", "(", "'FILEID'", ",", "ocrd_file", ".", "ID", ")" ]
Create a new Item from a file path .
def from_path ( self , repo , path , name = None ) : if name is None : name = os . path . basename ( path ) #FIXME: hack, there has to be a better way return Item . from_string ( repo = repo , name = name , string = open ( path ) . read ( ) )
10,152
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L146-L159
[ "def", "__is_json_error", "(", "self", ",", "status", ",", "headers", ")", ":", "content_header", "=", "headers", ".", "get", "(", "'content-type'", ",", "''", ")", "content_type", ",", "unused_params", "=", "cgi", ".", "parse_header", "(", "content_header", ")", "return", "(", "status", ".", "startswith", "(", "'400'", ")", "and", "content_type", ".", "lower", "(", ")", "in", "_ALL_JSON_CONTENT_TYPES", ")" ]
Create a new Item from a data stream .
def from_string ( self , repo , name , string ) : try : log . debug ( 'Creating new item: %s' % name ) blob = Blob . from_string ( string ) item = Item ( parent = repo , sha = blob . sha , path = name ) item . blob = blob return item except AssertionError , e : raise ItemError ( e )
10,153
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L162-L179
[ "def", "_index_verify", "(", "index_file", ",", "*", "*", "extra_kwargs", ")", ":", "side_effect", "=", "extra_kwargs", ".", "pop", "(", "\"side_effect\"", ",", "None", ")", "with", "open", "(", "TEMPLATE_FILE", ",", "\"r\"", ")", "as", "file_obj", ":", "template", "=", "file_obj", ".", "read", "(", ")", "template_kwargs", "=", "{", "\"code_block1\"", ":", "SPHINX_CODE_BLOCK1", ",", "\"code_block2\"", ":", "SPHINX_CODE_BLOCK2", ",", "\"code_block3\"", ":", "SPHINX_CODE_BLOCK3", ",", "\"testcleanup\"", ":", "TEST_CLEANUP", ",", "\"toctree\"", ":", "TOCTREE", ",", "\"bernstein_basis\"", ":", "BERNSTEIN_BASIS_SPHINX", ",", "\"bezier_defn\"", ":", "BEZIER_DEFN_SPHINX", ",", "\"sum_to_unity\"", ":", "SUM_TO_UNITY_SPHINX", ",", "\"img_prefix\"", ":", "\"\"", ",", "\"extra_links\"", ":", "\"\"", ",", "\"docs\"", ":", "\"\"", ",", "\"docs_img\"", ":", "\"\"", ",", "\"pypi\"", ":", "\"\\n\\n|pypi| \"", ",", "\"pypi_img\"", ":", "PYPI_IMG", ",", "\"versions\"", ":", "\"|versions|\\n\\n\"", ",", "\"versions_img\"", ":", "VERSIONS_IMG", ",", "\"rtd_version\"", ":", "RTD_VERSION", ",", "\"revision\"", ":", "REVISION", ",", "\"circleci_badge\"", ":", "CIRCLECI_BADGE", ",", "\"circleci_path\"", ":", "\"\"", ",", "\"travis_badge\"", ":", "TRAVIS_BADGE", ",", "\"travis_path\"", ":", "\"\"", ",", "\"appveyor_badge\"", ":", "APPVEYOR_BADGE", ",", "\"appveyor_path\"", ":", "\"\"", ",", "\"coveralls_badge\"", ":", "COVERALLS_BADGE", ",", "\"coveralls_path\"", ":", "COVERALLS_PATH", ",", "\"zenodo\"", ":", "\"|zenodo|\"", ",", "\"zenodo_img\"", ":", "ZENODO_IMG", ",", "\"joss\"", ":", "\" |JOSS|\"", ",", "\"joss_img\"", ":", "JOSS_IMG", ",", "}", "template_kwargs", ".", "update", "(", "*", "*", "extra_kwargs", ")", "expected", "=", "template", ".", "format", "(", "*", "*", "template_kwargs", ")", "if", "side_effect", "is", "not", "None", ":", "expected", "=", "side_effect", "(", "expected", ")", "with", "open", "(", "index_file", ",", "\"r\"", ")", "as", "file_obj", ":", "contents", 
"=", "file_obj", ".", "read", "(", ")", "if", "contents", "!=", "expected", ":", "err_msg", "=", "\"\\n\"", "+", "get_diff", "(", "contents", ",", "expected", ",", "index_file", "+", "\".actual\"", ",", "index_file", "+", "\".expected\"", ",", ")", "raise", "ValueError", "(", "err_msg", ")", "else", ":", "rel_name", "=", "os", ".", "path", ".", "relpath", "(", "index_file", ",", "_ROOT_DIR", ")", "msg", "=", "\"{} contents are as expected.\"", ".", "format", "(", "rel_name", ")", "print", "(", "msg", ")" ]
Modify item data and commit to repo . Git objects are immutable to save means adding a new item
def save ( self , msg = None ) : if msg is None : msg = 'Saving %s' % self . name log . debug ( msg ) self . repo . addItem ( self , msg )
10,154
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L193-L203
[ "def", "_get_bios_boot_resource", "(", "self", ",", "data", ")", ":", "try", ":", "boot_uri", "=", "data", "[", "'links'", "]", "[", "'Boot'", "]", "[", "'href'", "]", "except", "KeyError", ":", "msg", "=", "(", "'Boot resource not found.'", ")", "raise", "exception", ".", "IloCommandNotSupportedError", "(", "msg", ")", "status", ",", "headers", ",", "boot_settings", "=", "self", ".", "_rest_get", "(", "boot_uri", ")", "if", "status", "!=", "200", ":", "msg", "=", "self", ".", "_get_extended_error", "(", "boot_settings", ")", "raise", "exception", ".", "IloError", "(", "msg", ")", "return", "boot_settings" ]
Check out file data to path .
def checkout ( self , path ) : if os . path . isdir ( path ) : path = os . path . join ( path , self . name ) try : log . debug ( 'Checking out %s to %s' % ( self . path , path ) ) f = open ( path , 'w' ) f . write ( self . data ( ) ) f . close ( ) return True except Exception , e : raise ItemError ( e )
10,155
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L205-L222
[ "def", "list_distributions", "(", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "retries", "=", "10", "sleep", "=", "6", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "Items", "=", "[", "]", "while", "retries", ":", "try", ":", "log", ".", "debug", "(", "'Garnering list of CloudFront distributions'", ")", "Marker", "=", "''", "while", "Marker", "is", "not", "None", ":", "ret", "=", "conn", ".", "list_distributions", "(", "Marker", "=", "Marker", ")", "Items", "+=", "ret", ".", "get", "(", "'DistributionList'", ",", "{", "}", ")", ".", "get", "(", "'Items'", ",", "[", "]", ")", "Marker", "=", "ret", ".", "get", "(", "'DistributionList'", ",", "{", "}", ")", ".", "get", "(", "'NextMarker'", ")", "return", "Items", "except", "botocore", ".", "exceptions", ".", "ParamValidationError", "as", "err", ":", "raise", "SaltInvocationError", "(", "str", "(", "err", ")", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "err", ":", "if", "retries", "and", "err", ".", "response", ".", "get", "(", "'Error'", ",", "{", "}", ")", ".", "get", "(", "'Code'", ")", "==", "'Throttling'", ":", "retries", "-=", "1", "log", ".", "debug", "(", "'Throttled by AWS API, retrying in %s seconds...'", ",", "sleep", ")", "time", ".", "sleep", "(", "sleep", ")", "continue", "log", ".", "error", "(", "'Failed to list CloudFront distributions: %s'", ",", "err", ".", "message", ")", "return", "None" ]
Add version to repo object store set repo head to version sha .
def save ( self , message ) : self . commit . message = message self . commit . tree = self . tree #TODO: store new blobs only for item in self . tree . items ( ) : self . repo . object_store . add_object ( item . blob ) self . repo . object_store . add_object ( self . tree ) # set HEAD to new commit self . repo . object_store . add_object ( self . commit ) self . repo . refs [ 'refs/heads/master' ] = self . commit . id
10,156
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L336-L351
[ "def", "validate", "(", "self", ",", "data", ")", ":", "if", "data", "is", "not", "None", "and", "not", "isinstance", "(", "data", ",", "dict", ")", ":", "raise", "serializers", ".", "ValidationError", "(", "\"Invalid data\"", ")", "try", ":", "profiles", "=", "[", "ev", "[", "\"profile\"", "]", "for", "ev", "in", "data", ".", "get", "(", "\"encoded_videos\"", ",", "[", "]", ")", "]", "if", "len", "(", "profiles", ")", "!=", "len", "(", "set", "(", "profiles", ")", ")", ":", "raise", "serializers", ".", "ValidationError", "(", "\"Invalid data: duplicate profiles\"", ")", "except", "KeyError", ":", "raise", "serializers", ".", "ValidationError", "(", "\"profile required for deserializing\"", ")", "except", "TypeError", ":", "raise", "serializers", ".", "ValidationError", "(", "\"profile field needs to be a profile_name (str)\"", ")", "# Clean course_video list from any invalid data.", "course_videos", "=", "[", "(", "course_video", ",", "image", ")", "for", "course_video", ",", "image", "in", "data", ".", "get", "(", "'courses'", ",", "[", "]", ")", "if", "course_video", "]", "data", "[", "'courses'", "]", "=", "course_videos", "return", "data" ]
Create a new version of a repo . Local object .
def new ( self , repo ) : #TODO: subclass Commit, pass parent as init param try : # create new commit instance and set metadata commit = Commit ( ) author = os . environ . get ( 'USER' ) commit . author = commit . committer = author commit . commit_time = commit . author_time = int ( time ( ) ) tz = parse_timezone ( '-0200' ) [ 0 ] commit . commit_timezone = commit . author_timezone = tz commit . encoding = "UTF-8" commit . message = '' # set previous version as parent to this one parent = repo . versions ( - 1 ) if parent : commit . parents = [ parent . id ] # create new tree, add entries from previous version tree = Tree ( ) curr = repo . versions ( - 1 ) if curr : for item in curr . items ( ) : tree . addItem ( item ) commit . tree = tree . id # create new version, and add tree version = Version ( repo = repo , commit = commit , tree = tree ) return version except Exception , e : traceback . print_exc ( ) return VersionError ( e )
10,157
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/repo/version.py#L354-L393
[ "def", "for_sponsor", "(", "self", ",", "sponsor", ",", "include_cancelled", "=", "False", ")", ":", "sponsoring_filter", "=", "(", "Q", "(", "sponsors", "=", "sponsor", ")", "|", "(", "Q", "(", "sponsors", "=", "None", ")", "&", "Q", "(", "activity__sponsors", "=", "sponsor", ")", ")", ")", "sched_acts", "=", "(", "EighthScheduledActivity", ".", "objects", ".", "exclude", "(", "activity__deleted", "=", "True", ")", ".", "filter", "(", "sponsoring_filter", ")", ".", "distinct", "(", ")", ")", "if", "not", "include_cancelled", ":", "sched_acts", "=", "sched_acts", ".", "exclude", "(", "cancelled", "=", "True", ")", "return", "sched_acts" ]
Prompts user for confirmation .
def confirm ( prompt = None , resp = False ) : if prompt is None : prompt = 'Confirm' if resp : prompt = '%s [%s]|%s: ' % ( prompt , 'y' , 'n' ) else : prompt = '%s [%s]|%s: ' % ( prompt , 'n' , 'y' ) while True : ans = raw_input ( prompt ) if not ans : return resp if ans not in [ 'y' , 'Y' , 'n' , 'N' ] : print 'please enter y or n.' continue if ans == 'y' or ans == 'Y' : return True if ans == 'n' or ans == 'N' : return False
10,158
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L17-L44
[ "def", "eventFilter", "(", "self", ",", "widget", ",", "event", ")", ":", "if", "event", ".", "type", "(", ")", "==", "QEvent", ".", "KeyPress", "and", "event", ".", "key", "(", ")", "==", "Qt", ".", "Key_Delete", ":", "index", "=", "self", ".", "view", "(", ")", ".", "currentIndex", "(", ")", ".", "row", "(", ")", "if", "index", ">=", "EXTERNAL_PATHS", ":", "# Remove item and update the view.\r", "self", ".", "removeItem", "(", "index", ")", "self", ".", "showPopup", "(", ")", "# Set the view selection so that it doesn't bounce around.\r", "new_index", "=", "min", "(", "self", ".", "count", "(", ")", "-", "1", ",", "index", ")", "new_index", "=", "0", "if", "new_index", "<", "EXTERNAL_PATHS", "else", "new_index", "self", ".", "view", "(", ")", ".", "setCurrentIndex", "(", "self", ".", "model", "(", ")", ".", "index", "(", "new_index", ",", "0", ")", ")", "self", ".", "setCurrentIndex", "(", "new_index", ")", "return", "True", "return", "QComboBox", ".", "eventFilter", "(", "self", ",", "widget", ",", "event", ")" ]
Prompts user for raw input .
def prompt ( name , default ) : value = raw_input ( '%s [%s]: ' % ( name , default ) ) if not value : value = default return value
10,159
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L46-L55
[ "def", "interior_nesting", "(", "cls", ",", "elem1", ",", "xpath", ",", "namespaces", "=", "None", ")", ":", "for", "elem2", "in", "elem1", ".", "xpath", "(", "xpath", ",", "namespaces", "=", "namespaces", ")", ":", "child_elem1", "=", "etree", ".", "Element", "(", "elem1", ".", "tag", ")", "for", "k", "in", "elem1", ".", "attrib", ":", "child_elem1", ".", "set", "(", "k", ",", "elem1", ".", "get", "(", "k", ")", ")", "child_elem1", ".", "text", ",", "elem2", ".", "text", "=", "elem2", ".", "text", ",", "''", "for", "ch", "in", "elem2", ".", "getchildren", "(", ")", ":", "child_elem1", ".", "append", "(", "ch", ")", "elem2", ".", "insert", "(", "0", ",", "child_elem1", ")", "XML", ".", "replace_with_contents", "(", "elem1", ")" ]
Creates a new Repo class instance at url .
def new ( url ) : from grit import Repo return Repo . new ( url = url , bare = True )
10,160
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L57-L66
[ "def", "run", "(", "self", ")", ":", "response", "=", "None", "root", "=", "tkinter", ".", "Tk", "(", ")", "root", ".", "withdraw", "(", ")", "while", "response", "is", "not", "True", ":", "response", "=", "tkinter", ".", "messagebox", ".", "askokcancel", "(", "title", "=", "self", ".", "title", ",", "message", "=", "self", ".", "pre_message", ")", "if", "self", ".", "post_message", ":", "print", "(", "self", ".", "post_message", ")", "self", ".", "exit_time", "=", "time", ".", "time", "(", ")" ]
Checks out latest version of item or repository .
def checkout ( url , version = None ) : from grit import Repo r = Repo ( url ) def _write ( item ) : log . debug ( 'writing: %s' % item . name ) if item . type != 'blob' : return if r . type in [ 'repo' , 'proxy' , 'local' ] : path = os . path . join ( r . name , item . path ) pdir = os . path . dirname ( path ) if not os . path . isdir ( pdir ) : os . makedirs ( pdir ) else : path = item . name f = open ( path , 'w' ) f . write ( item . data ( ) ) f . close ( ) if r . type == 'blob' : _write ( r ) else : items = r . items ( ) count = 1 total = len ( items ) while count <= total : print '[%s/%s] %0.2f%%' % ( count , total , ( float ( count ) / total ) * 100 ) , '*' * count , '\r' , _write ( items [ count - 1 ] ) count += 1 sys . stdout . flush ( ) print
10,161
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L68-L105
[ "def", "get", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "private_file", "=", "self", ".", "get_private_file", "(", ")", "if", "not", "self", ".", "can_access_file", "(", "private_file", ")", ":", "return", "HttpResponseForbidden", "(", "'Private storage access denied'", ")", "if", "not", "private_file", ".", "exists", "(", ")", ":", "return", "self", ".", "serve_file_not_found", "(", "private_file", ")", "else", ":", "return", "self", ".", "serve_file", "(", "private_file", ")" ]
Check in files to a repository .
def checkin ( url , files , message = None ) : from grit import Repo , Item r = Repo ( url ) if not files : raise GritError ( 'No files' ) def _write ( path ) : item = Item . from_path ( repo = r , path = path ) if r . isLocal ( ) : v . addItem ( item = item ) else : r . upload ( filename = os . path . basename ( path ) , filedata = open ( path , 'r' ) . read ( ) ) if r . isLocal ( ) : v = r . addVersion ( ) count = 1 total = len ( files ) while count <= total : print '[%s/%s] %0.2f%%' % ( count , total , ( float ( count ) / total ) * 100 ) , '*' * count , '\r' , _write ( os . path . abspath ( files [ count - 1 ] ) ) count += 1 sys . stdout . flush ( ) if message is None : message = 'Publishing %s' % ', ' . join ( files ) if r . isLocal ( ) : v . save ( message = message ) print
10,162
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/cmd/cli.py#L107-L140
[ "def", "get_accounts", "(", "cls", ",", "soco", "=", "None", ")", ":", "root", "=", "XML", ".", "fromstring", "(", "cls", ".", "_get_account_xml", "(", "soco", ")", ")", "# _get_account_xml returns an ElementTree element like this:", "# <ZPSupportInfo type=\"User\">", "# <Accounts", "# LastUpdateDevice=\"RINCON_000XXXXXXXX400\"", "# Version=\"8\" NextSerialNum=\"5\">", "# <Account Type=\"2311\" SerialNum=\"1\">", "# <UN>12345678</UN>", "# <MD>1</MD>", "# <NN></NN>", "# <OADevID></OADevID>", "# <Key></Key>", "# </Account>", "# <Account Type=\"41735\" SerialNum=\"3\" Deleted=\"1\">", "# <UN></UN>", "# <MD>1</MD>", "# <NN>Nickname</NN>", "# <OADevID></OADevID>", "# <Key></Key>", "# </Account>", "# ...", "# <Accounts />", "xml_accounts", "=", "root", ".", "findall", "(", "'.//Account'", ")", "result", "=", "{", "}", "for", "xml_account", "in", "xml_accounts", ":", "serial_number", "=", "xml_account", ".", "get", "(", "'SerialNum'", ")", "is_deleted", "=", "True", "if", "xml_account", ".", "get", "(", "'Deleted'", ")", "==", "'1'", "else", "False", "# cls._all_accounts is a weakvaluedict keyed by serial number.", "# We use it as a database to store details of the accounts we", "# know about. We need to update it with info obtained from the", "# XML just obtained, so (1) check to see if we already have an", "# entry in cls._all_accounts for the account we have found in", "# XML; (2) if so, delete it if the XML says it has been deleted;", "# and (3) if not, create an entry for it", "if", "cls", ".", "_all_accounts", ".", "get", "(", "serial_number", ")", ":", "# We have an existing entry in our database. 
Do we need to", "# delete it?", "if", "is_deleted", ":", "# Yes, so delete it and move to the next XML account", "del", "cls", ".", "_all_accounts", "[", "serial_number", "]", "continue", "else", ":", "# No, so load up its details, ready to update them", "account", "=", "cls", ".", "_all_accounts", ".", "get", "(", "serial_number", ")", "else", ":", "# We have no existing entry for this account", "if", "is_deleted", ":", "# but it is marked as deleted, so we don't need one", "continue", "# If it is not marked as deleted, we need to create an entry", "account", "=", "Account", "(", ")", "account", ".", "serial_number", "=", "serial_number", "cls", ".", "_all_accounts", "[", "serial_number", "]", "=", "account", "# Now, update the entry in our database with the details from XML", "account", ".", "service_type", "=", "xml_account", ".", "get", "(", "'Type'", ")", "account", ".", "deleted", "=", "is_deleted", "account", ".", "username", "=", "xml_account", ".", "findtext", "(", "'UN'", ")", "# Not sure what 'MD' stands for. Metadata? May Delete?", "account", ".", "metadata", "=", "xml_account", ".", "findtext", "(", "'MD'", ")", "account", ".", "nickname", "=", "xml_account", ".", "findtext", "(", "'NN'", ")", "account", ".", "oa_device_id", "=", "xml_account", ".", "findtext", "(", "'OADevID'", ")", "account", ".", "key", "=", "xml_account", ".", "findtext", "(", "'Key'", ")", "result", "[", "serial_number", "]", "=", "account", "# There is always a TuneIn account, but it is handled separately", "# by Sonos, and does not appear in the xml account data. 
We", "# need to add it ourselves.", "tunein", "=", "Account", "(", ")", "tunein", ".", "service_type", "=", "'65031'", "# Is this always the case?", "tunein", ".", "deleted", "=", "False", "tunein", ".", "username", "=", "''", "tunein", ".", "metadata", "=", "''", "tunein", ".", "nickname", "=", "''", "tunein", ".", "oa_device_id", "=", "''", "tunein", ".", "key", "=", "''", "tunein", ".", "serial_number", "=", "'0'", "result", "[", "'0'", "]", "=", "tunein", "return", "result" ]
Retrieve an object by making a GET request to Transifex .
def get ( cls , * * kwargs ) : fields = { } for field in cls . url_fields : value = kwargs . pop ( field , None ) if value is None : cls . _handle_wrong_field ( field , ATTR_TYPE_URL ) fields [ field ] = value # Create an instance of the model class and make the GET request model = cls ( * * fields ) model . _populate ( * * kwargs ) return model
10,163
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L109-L139
[ "def", "coord_wrap", "(", "self", ",", "*", "args", ")", ":", "yield", "from", "self", ".", "cell", ".", "coord", ".", "start", "(", "self", ")", "yield", "from", "self", ".", "coro", "(", "*", "args", ")", "yield", "from", "self", ".", "cell", ".", "coord", ".", "finish", "(", "self", ")" ]
Save the instance to the remote Transifex server .
def save ( self , * * fields ) : for field in fields : if field in self . writable_fields : setattr ( self , field , fields [ field ] ) else : self . _handle_wrong_field ( field , ATTR_TYPE_WRITE ) if self . _populated_fields : self . _update ( * * self . _modified_fields ) else : self . _create ( * * self . _modified_fields )
10,164
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L213-L236
[ "def", "delete_everything", "(", "self", ")", ":", "for", "k", "in", "self", ".", "_backup_list", "(", "prefix", "=", "self", ".", "layout", ".", "basebackups", "(", ")", ")", ":", "self", ".", "_maybe_delete_key", "(", "k", ",", "'part of a base backup'", ")", "for", "k", "in", "self", ".", "_backup_list", "(", "prefix", "=", "self", ".", "layout", ".", "wal_directory", "(", ")", ")", ":", "self", ".", "_maybe_delete_key", "(", "k", ",", "'part of wal logs'", ")", "if", "self", ".", "deleter", ":", "self", ".", "deleter", ".", "close", "(", ")" ]
Get the resource from a remote Transifex server .
def _get ( self , * * kwargs ) : path = self . _construct_path_to_item ( ) return self . _http . get ( path )
10,165
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L246-L249
[ "def", "extract_journal_reference", "(", "line", ",", "override_kbs_files", "=", "None", ")", ":", "kbs", "=", "get_kbs", "(", "custom_kbs_files", "=", "override_kbs_files", ")", "references", ",", "dummy_m", ",", "dummy_c", ",", "dummy_co", "=", "parse_reference_line", "(", "line", ",", "kbs", ")", "for", "elements", "in", "references", ":", "for", "el", "in", "elements", ":", "if", "el", "[", "'type'", "]", "==", "'JOURNAL'", ":", "return", "el" ]
Create a resource in the remote Transifex server .
def _create ( self , * * kwargs ) : path = self . _construct_path_to_collection ( ) # Use the fields for which we have values for field in self . writable_fields : try : value = getattr ( self , field ) kwargs [ field ] = value except AttributeError : pass return self . _http . post ( path , json . dumps ( kwargs ) )
10,166
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L251-L262
[ "def", "extract_journal_reference", "(", "line", ",", "override_kbs_files", "=", "None", ")", ":", "kbs", "=", "get_kbs", "(", "custom_kbs_files", "=", "override_kbs_files", ")", "references", ",", "dummy_m", ",", "dummy_c", ",", "dummy_co", "=", "parse_reference_line", "(", "line", ",", "kbs", ")", "for", "elements", "in", "references", ":", "for", "el", "in", "elements", ":", "if", "el", "[", "'type'", "]", "==", "'JOURNAL'", ":", "return", "el" ]
Update a resource in a remote Transifex server .
def _update ( self , * * kwargs ) : path = self . _construct_path_to_item ( ) if not kwargs : return return self . _http . put ( path , json . dumps ( kwargs ) )
10,167
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L264-L269
[ "def", "extract_journal_reference", "(", "line", ",", "override_kbs_files", "=", "None", ")", ":", "kbs", "=", "get_kbs", "(", "custom_kbs_files", "=", "override_kbs_files", ")", "references", ",", "dummy_m", ",", "dummy_c", ",", "dummy_co", "=", "parse_reference_line", "(", "line", ",", "kbs", ")", "for", "elements", "in", "references", ":", "for", "el", "in", "elements", ":", "if", "el", "[", "'type'", "]", "==", "'JOURNAL'", ":", "return", "el" ]
Delete a resource from a remote Transifex server .
def _delete ( self , * * kwargs ) : path = self . _construct_path_to_item ( ) return self . _http . delete ( path )
10,168
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L271-L274
[ "def", "extract_journal_reference", "(", "line", ",", "override_kbs_files", "=", "None", ")", ":", "kbs", "=", "get_kbs", "(", "custom_kbs_files", "=", "override_kbs_files", ")", "references", ",", "dummy_m", ",", "dummy_c", ",", "dummy_co", "=", "parse_reference_line", "(", "line", ",", "kbs", ")", "for", "elements", "in", "references", ":", "for", "el", "in", "elements", ":", "if", "el", "[", "'type'", "]", "==", "'JOURNAL'", ":", "return", "el" ]
Create a dictionary of parameters used in URLs for this model .
def get_url_parameters ( self ) : url_fields = { } for field in self . url_fields : url_fields [ field ] = getattr ( self , field ) return url_fields
10,169
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L285-L290
[ "def", "wait_for_compactions", "(", "self", ",", "timeout", "=", "120", ")", ":", "pattern", "=", "re", ".", "compile", "(", "\"pending tasks: 0\"", ")", "start", "=", "time", ".", "time", "(", ")", "while", "time", ".", "time", "(", ")", "-", "start", "<", "timeout", ":", "output", ",", "err", ",", "rc", "=", "self", ".", "nodetool", "(", "\"compactionstats\"", ")", "if", "pattern", ".", "search", "(", "output", ")", ":", "return", "time", ".", "sleep", "(", "1", ")", "raise", "TimeoutError", "(", "\"{} [{}] Compactions did not finish in {} seconds\"", ".", "format", "(", "time", ".", "strftime", "(", "\"%d %b %Y %H:%M:%S\"", ",", "time", ".", "gmtime", "(", ")", ")", ",", "self", ".", "name", ",", "timeout", ")", ")" ]
Raise an exception whenever an invalid attribute with the given name was attempted to be set to or retrieved from this model class .
def _handle_wrong_field ( cls , field_name , field_type ) : if field_type == ATTR_TYPE_READ : field_type = 'readable' elif field_type == ATTR_TYPE_WRITE : field_type = 'writable' elif field_type == ATTR_TYPE_URL : field_type = 'URL' else : raise AttributeError ( 'Invalid attribute type: {}' . format ( field_type ) ) msg = '{} has no {} attribute "{}"' . format ( cls . __name__ , field_type , field_name ) _logger . error ( msg ) raise AttributeError ( msg )
10,170
https://github.com/transifex/transifex-python-library/blob/9fea86b718973de35ccca6d54bd1f445c9632406/txlib/api/base.py#L307-L333
[ "def", "ranker", "(", "self", ",", "X", ",", "meta", ")", ":", "# total score is just a sum of each row", "total_score", "=", "X", ".", "sum", "(", "axis", "=", "1", ")", ".", "transpose", "(", ")", "total_score", "=", "np", ".", "squeeze", "(", "np", ".", "asarray", "(", "total_score", ")", ")", "# matrix to array", "ranks", "=", "total_score", ".", "argsort", "(", ")", "ranks", "=", "ranks", "[", ":", ":", "-", "1", "]", "# sort the list of dicts according to ranks", "sorted_meta", "=", "[", "meta", "[", "r", "]", "for", "r", "in", "ranks", "]", "sorted_X", "=", "X", "[", "ranks", "]", "return", "(", "sorted_X", ",", "sorted_meta", ")" ]
Adds rules to global http mock .
def update_http_rules ( rules , content_type = 'text/plain' ) : for kw in deepcopy ( rules ) : kw [ 'url' ] = re . compile ( kw [ 'url' ] ) # ensure headers dict for at least have a default content type if 'Content-Type' not in kw . get ( 'headers' , { } ) : kw [ 'headers' ] = dict ( kw . get ( 'headers' , { } ) , * * { 'Content-Type' : content_type , } ) method = kw . pop ( 'method' ) url = kw . pop ( 'url' ) http_mock . register_uri ( method , url , * * kw )
10,171
https://github.com/peopledoc/mock-services/blob/fd3838280df8869725b538768357435eedf299c1/mock_services/rules.py#L33-L75
[ "def", "_decode_caveat_v1", "(", "key", ",", "caveat", ")", ":", "data", "=", "base64", ".", "b64decode", "(", "caveat", ")", ".", "decode", "(", "'utf-8'", ")", "wrapper", "=", "json", ".", "loads", "(", "data", ")", "tp_public_key", "=", "nacl", ".", "public", ".", "PublicKey", "(", "base64", ".", "b64decode", "(", "wrapper", "[", "'ThirdPartyPublicKey'", "]", ")", ")", "if", "key", ".", "public_key", ".", "key", "!=", "tp_public_key", ":", "raise", "Exception", "(", "'public key mismatch'", ")", "# TODO", "if", "wrapper", ".", "get", "(", "'FirstPartyPublicKey'", ",", "None", ")", "is", "None", ":", "raise", "Exception", "(", "'target service public key not specified'", ")", "# The encrypted string is base64 encoded in the JSON representation.", "secret", "=", "base64", ".", "b64decode", "(", "wrapper", ".", "get", "(", "'Id'", ")", ")", "nonce", "=", "base64", ".", "b64decode", "(", "wrapper", ".", "get", "(", "'Nonce'", ")", ")", "fp_public_key", "=", "nacl", ".", "public", ".", "PublicKey", "(", "base64", ".", "b64decode", "(", "wrapper", ".", "get", "(", "'FirstPartyPublicKey'", ")", ")", ")", "box", "=", "nacl", ".", "public", ".", "Box", "(", "key", ".", "key", ",", "fp_public_key", ")", "c", "=", "box", ".", "decrypt", "(", "secret", ",", "nonce", ")", "record", "=", "json", ".", "loads", "(", "c", ".", "decode", "(", "'utf-8'", ")", ")", "fp_key", "=", "nacl", ".", "public", ".", "PublicKey", "(", "base64", ".", "b64decode", "(", "wrapper", ".", "get", "(", "'FirstPartyPublicKey'", ")", ")", ")", "return", "ThirdPartyCaveatInfo", "(", "condition", "=", "record", ".", "get", "(", "'Condition'", ")", ",", "first_party_public_key", "=", "PublicKey", "(", "fp_key", ")", ",", "third_party_key_pair", "=", "key", ",", "root_key", "=", "base64", ".", "b64decode", "(", "record", ".", "get", "(", "'RootKey'", ")", ")", ",", "caveat", "=", "caveat", ",", "id", "=", "None", ",", "version", "=", "VERSION_1", ",", "namespace", "=", "legacy_namespace", "(", ")", ")" ]
Append last task to task history .
def get_task_history ( last_task ) : if hasattr ( last_task , 'branch' ) and last_task . branch : return elif hasattr ( last_task , 'hide' ) and last_task . hide : return else : return get_func_info ( last_task )
10,172
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/utils.py#L30-L37
[ "def", "connection", "(", "cls", ")", ":", "local", "=", "cls", ".", "_threadlocal", "if", "not", "getattr", "(", "local", ",", "'connection'", ",", "None", ")", ":", "# Make sure these variables are no longer affected by other threads.", "local", ".", "user", "=", "cls", ".", "user", "local", ".", "password", "=", "cls", ".", "password", "local", ".", "site", "=", "cls", ".", "site", "local", ".", "timeout", "=", "cls", ".", "timeout", "local", ".", "headers", "=", "cls", ".", "headers", "local", ".", "format", "=", "cls", ".", "format", "local", ".", "version", "=", "cls", ".", "version", "local", ".", "url", "=", "cls", ".", "url", "if", "cls", ".", "site", "is", "None", ":", "raise", "ValueError", "(", "\"No shopify session is active\"", ")", "local", ".", "connection", "=", "ShopifyConnection", "(", "cls", ".", "site", ",", "cls", ".", "user", ",", "cls", ".", "password", ",", "cls", ".", "timeout", ",", "cls", ".", "format", ")", "return", "local", ".", "connection" ]
Retrieve a function s information .
def get_func_info ( func ) : name = func . __name__ doc = func . __doc__ or "" try : nicename = func . description except AttributeError : if doc : nicename = doc . split ( '\n' ) [ 0 ] if len ( nicename ) > 80 : nicename = name else : nicename = name parameters = [ ] try : closure = func . func_closure except AttributeError : closure = func . __closure__ try : varnames = func . func_code . co_freevars except AttributeError : varnames = func . __code__ . co_freevars if closure : for index , arg in enumerate ( closure ) : if not callable ( arg . cell_contents ) : parameters . append ( ( varnames [ index ] , text_type ( arg . cell_contents ) ) ) return ( { "nicename" : nicename , "doc" : doc , "parameters" : parameters , "name" : name , "time" : str ( datetime . datetime . now ( ) ) , "hostname" : socket . gethostname ( ) , } )
10,173
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/utils.py#L40-L75
[ "def", "DeleteSnapshot", "(", "self", ",", "names", "=", "None", ")", ":", "if", "names", "is", "None", ":", "names", "=", "self", ".", "GetSnapshots", "(", ")", "requests_lst", "=", "[", "]", "for", "name", "in", "names", ":", "name_links", "=", "[", "obj", "[", "'links'", "]", "for", "obj", "in", "self", ".", "data", "[", "'details'", "]", "[", "'snapshots'", "]", "if", "obj", "[", "'name'", "]", "==", "name", "]", "[", "0", "]", "requests_lst", ".", "append", "(", "clc", ".", "v2", ".", "Requests", "(", "clc", ".", "v2", ".", "API", ".", "Call", "(", "'DELETE'", ",", "[", "obj", "[", "'href'", "]", "for", "obj", "in", "name_links", "if", "obj", "[", "'rel'", "]", "==", "'delete'", "]", "[", "0", "]", ",", "session", "=", "self", ".", "session", ")", ",", "alias", "=", "self", ".", "alias", ",", "session", "=", "self", ".", "session", ")", ")", "return", "(", "sum", "(", "requests_lst", ")", ")" ]
Return function info go through lists recursively .
def get_workflow_info ( func_list ) : funcs = [ ] for item in func_list : if item is None : continue if isinstance ( item , list ) : funcs . append ( get_workflow_info ( item ) ) else : funcs . append ( get_func_info ( item ) ) return funcs
10,174
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/utils.py#L78-L88
[ "def", "if_sqlserver_disable_constraints_triggers", "(", "session", ":", "SqlASession", ",", "tablename", ":", "str", ")", "->", "None", ":", "with", "if_sqlserver_disable_constraints", "(", "session", ",", "tablename", ")", ":", "with", "if_sqlserver_disable_triggers", "(", "session", ",", "tablename", ")", ":", "yield" ]
Copy a properly formatted context into a mutable data structure .
def _copy_context_into_mutable ( context ) : def make_mutable ( val ) : if isinstance ( val , Mapping ) : return dict ( val ) else : return val if not isinstance ( context , ( str , Mapping ) ) : try : return [ make_mutable ( val ) for val in context ] except TypeError : pass return make_mutable ( context )
10,175
https://github.com/COALAIP/pycoalaip/blob/cecc8f6ff4733f0525fafcee63647753e832f0be/coalaip/data_formats.py#L17-L31
[ "async", "def", "throttle_update_heaters", "(", "self", ")", ":", "if", "(", "self", ".", "_throttle_time", "is", "not", "None", "and", "dt", ".", "datetime", ".", "now", "(", ")", "-", "self", ".", "_throttle_time", "<", "MIN_TIME_BETWEEN_UPDATES", ")", ":", "return", "self", ".", "_throttle_time", "=", "dt", ".", "datetime", ".", "now", "(", ")", "await", "self", ".", "update_heaters", "(", ")" ]
make all the models for a dataset
def make_dataset_models ( dataset , schemas_and_tables , metadata_dict = None , version : int = 1 , include_contacts = False ) : if metadata_dict is None : metadata_dict = { } validate_types ( schemas_and_tables ) dataset_dict = { } cell_segment_model = make_cell_segment_model ( dataset , version = version ) dataset_dict [ root_model_name . lower ( ) ] = cell_segment_model for schema_name , table_name in schemas_and_tables : model_key = table_name metadata = metadata_dict . get ( table_name , None ) dataset_dict [ model_key ] = make_annotation_model ( dataset , schema_name , table_name , table_metadata = metadata , version = version ) if include_contacts : contact_model = make_annotation_model_from_schema ( dataset , 'contact' , Contact , version = version ) dataset_dict [ 'contact' ] = contact_model return dataset_dict
10,176
https://github.com/seung-lab/EMAnnotationSchemas/blob/ca81eff0f449bd7eb0392e0982db8f3636446a9e/emannotationschemas/models.py#L112-L158
[ "def", "waitForEvent", "(", "self", ",", "event_name", ",", "predicate", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", ":", "deadline", "=", "time", ".", "time", "(", ")", "+", "timeout", "while", "time", ".", "time", "(", ")", "<=", "deadline", ":", "# Calculate the max timeout for the next event rpc call.", "rpc_timeout", "=", "deadline", "-", "time", ".", "time", "(", ")", "if", "rpc_timeout", "<", "0", ":", "break", "# A single RPC call cannot exceed MAX_TIMEOUT.", "rpc_timeout", "=", "min", "(", "rpc_timeout", ",", "MAX_TIMEOUT", ")", "try", ":", "event", "=", "self", ".", "waitAndGet", "(", "event_name", ",", "rpc_timeout", ")", "except", "TimeoutError", ":", "# Ignoring TimeoutError since we need to throw one with a more", "# specific message.", "break", "if", "predicate", "(", "event", ")", ":", "return", "event", "raise", "TimeoutError", "(", "self", ".", "_ad", ",", "'Timed out after %ss waiting for an \"%s\" event that satisfies the '", "'predicate \"%s\".'", "%", "(", "timeout", ",", "event_name", ",", "predicate", ".", "__name__", ")", ")" ]
Return the key referring to this object
def _key_name ( self ) : # type: () -> str if self . _key is not None : return self . _key return self . __class__ . __name__ . lower ( )
10,177
https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L77-L87
[ "def", "write", "(", "self", ",", "splits", "=", "None", ",", "mergers", "=", "None", ",", "dividends", "=", "None", ",", "stock_dividends", "=", "None", ")", ":", "self", ".", "write_frame", "(", "'splits'", ",", "splits", ")", "self", ".", "write_frame", "(", "'mergers'", ",", "mergers", ")", "self", ".", "write_dividend_data", "(", "dividends", ",", "stock_dividends", ")", "# Use IF NOT EXISTS here to allow multiple writes if desired.", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS splits_sids \"", "\"ON splits(sid)\"", ")", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS splits_effective_date \"", "\"ON splits(effective_date)\"", ")", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS mergers_sids \"", "\"ON mergers(sid)\"", ")", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS mergers_effective_date \"", "\"ON mergers(effective_date)\"", ")", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS dividends_sid \"", "\"ON dividends(sid)\"", ")", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS dividends_effective_date \"", "\"ON dividends(effective_date)\"", ")", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS dividend_payouts_sid \"", "\"ON dividend_payouts(sid)\"", ")", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS dividends_payouts_ex_date \"", "\"ON dividend_payouts(ex_date)\"", ")", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS stock_dividend_payouts_sid \"", "\"ON stock_dividend_payouts(sid)\"", ")", "self", ".", "conn", ".", "execute", "(", "\"CREATE INDEX IF NOT EXISTS stock_dividends_payouts_ex_date \"", "\"ON stock_dividend_payouts(ex_date)\"", ")" ]
Return the dotted path representation of this object
def _path ( self ) : # type: () -> str if self . _parent : return '{}.{}' . format ( self . _parent . _path ( ) , self . _key_name ( ) ) return self . _key_name ( )
10,178
https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L103-L111
[ "def", "make_random_models_table", "(", "n_sources", ",", "param_ranges", ",", "random_state", "=", "None", ")", ":", "prng", "=", "check_random_state", "(", "random_state", ")", "sources", "=", "Table", "(", ")", "for", "param_name", ",", "(", "lower", ",", "upper", ")", "in", "param_ranges", ".", "items", "(", ")", ":", "# Generate a column for every item in param_ranges, even if it", "# is not in the model (e.g. flux). However, such columns will", "# be ignored when rendering the image.", "sources", "[", "param_name", "]", "=", "prng", ".", "uniform", "(", "lower", ",", "upper", ",", "n_sources", ")", "return", "sources" ]
Convenience function to add an error to this object with line numbers
def _add_error ( self , * args , * * kwargs ) : # type: () -> None if kwargs . get ( 'node' , None ) : # if node specified and not none error = ConfigError . create_from_yaml_node ( * args , * * kwargs ) elif self . _value_node : # default to using the node if we have one error = ConfigError . create_from_yaml_node ( node = self . _value_node , * args , * * kwargs ) else : # no nodes or error_obj to attach error = ConfigError ( * args , * * kwargs ) self . _errors . append ( error )
10,179
https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L113-L137
[ "async", "def", "services", "(", "self", ",", "*", ",", "dc", "=", "None", ",", "watch", "=", "None", ",", "consistency", "=", "None", ")", ":", "params", "=", "{", "\"dc\"", ":", "dc", "}", "response", "=", "await", "self", ".", "_api", ".", "get", "(", "\"/v1/catalog/services\"", ",", "params", "=", "params", ",", "watch", "=", "watch", ",", "consistency", "=", "consistency", ")", "return", "consul", "(", "response", ")" ]
Recursively get errors from descendants
def _get_descendants_errors ( self ) : # type: () -> List(ConfigError) descendants_errors = [ ] if hasattr ( self , '_children' ) : if isinstance ( self . _children , ( list , tuple ) ) : for c in self . _children : descendants_errors += c . _get_all_errors ( ) elif isinstance ( self . _children , dict ) : for c in self . _children . values ( ) : descendants_errors += c . _get_all_errors ( ) return descendants_errors
10,180
https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L139-L152
[ "def", "fix_auth_url_version_prefix", "(", "auth_url", ")", ":", "auth_url", "=", "_augment_url_with_version", "(", "auth_url", ")", "url_fixed", "=", "False", "if", "get_keystone_version", "(", ")", ">=", "3", "and", "has_in_url_path", "(", "auth_url", ",", "[", "\"/v2.0\"", "]", ")", ":", "url_fixed", "=", "True", "auth_url", "=", "url_path_replace", "(", "auth_url", ",", "\"/v2.0\"", ",", "\"/v3\"", ",", "1", ")", "return", "auth_url", ",", "url_fixed" ]
Run validation save errors to object in self . _errors
def _validate ( self ) : # type: () -> None # class can specify it's empty obj -- list would have empty of [] self . _errors = [ ] self . _validate_type ( ) if self . is_valid ( ) : self . _validate_value ( )
10,181
https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L163-L171
[ "def", "derivativeZ", "(", "self", ",", "mLvl", ",", "pLvl", ",", "MedShk", ")", ":", "xLvl", "=", "self", ".", "xFunc", "(", "mLvl", ",", "pLvl", ",", "MedShk", ")", "dxdShk", "=", "self", ".", "xFunc", ".", "derivativeZ", "(", "mLvl", ",", "pLvl", ",", "MedShk", ")", "dcdx", "=", "self", ".", "cFunc", ".", "derivativeX", "(", "xLvl", ",", "MedShk", ")", "dcdShk", "=", "dxdShk", "*", "dcdx", "+", "self", ".", "cFunc", ".", "derivativeY", "(", "xLvl", ",", "MedShk", ")", "dMeddShk", "=", "(", "dxdShk", "-", "dcdShk", ")", "/", "self", ".", "MedPrice", "return", "dcdShk", ",", "dMeddShk" ]
Validation to ensure value is the correct type
def _validate_type ( self ) : # type: () -> None if not isinstance ( self . _value , self . _type ) : title = '{} has an invalid type' . format ( self . _key_name ( ) ) description = '{} must be a {}' . format ( self . _key_name ( ) , self . _type . __name__ ) self . _add_error ( title = title , description = description )
10,182
https://github.com/dropseed/configyaml/blob/d008f251530d054c2d1fb3e8ac1a9030436134c8/configyaml/config/base.py#L173-L179
[ "def", "update_data", "(", "self", ")", ":", "url", "=", "(", "'https://www.openhumans.org/api/direct-sharing/project/'", "'members/?access_token={}'", ".", "format", "(", "self", ".", "master_access_token", ")", ")", "results", "=", "get_all_results", "(", "url", ")", "self", ".", "project_data", "=", "dict", "(", ")", "for", "result", "in", "results", ":", "self", ".", "project_data", "[", "result", "[", "'project_member_id'", "]", "]", "=", "result", "if", "len", "(", "result", "[", "'data'", "]", ")", "<", "result", "[", "'file_count'", "]", ":", "member_data", "=", "get_page", "(", "result", "[", "'exchange_member'", "]", ")", "final_data", "=", "member_data", "[", "'data'", "]", "while", "member_data", "[", "'next'", "]", ":", "member_data", "=", "get_page", "(", "member_data", "[", "'next'", "]", ")", "final_data", "=", "final_data", "+", "member_data", "[", "'data'", "]", "self", ".", "project_data", "[", "result", "[", "'project_member_id'", "]", "]", "[", "'data'", "]", "=", "final_data", "return", "self", ".", "project_data" ]
Check if we have at least one snapshot .
def haveSnapshots ( self ) : return os . path . islink ( self . latestLink ) and os . path . isdir ( self . latestLink )
10,183
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L71-L73
[ "def", "wrap", "(", "vtkdataset", ")", ":", "wrappers", "=", "{", "'vtkUnstructuredGrid'", ":", "vtki", ".", "UnstructuredGrid", ",", "'vtkRectilinearGrid'", ":", "vtki", ".", "RectilinearGrid", ",", "'vtkStructuredGrid'", ":", "vtki", ".", "StructuredGrid", ",", "'vtkPolyData'", ":", "vtki", ".", "PolyData", ",", "'vtkImageData'", ":", "vtki", ".", "UniformGrid", ",", "'vtkStructuredPoints'", ":", "vtki", ".", "UniformGrid", ",", "'vtkMultiBlockDataSet'", ":", "vtki", ".", "MultiBlock", ",", "}", "key", "=", "vtkdataset", ".", "GetClassName", "(", ")", "try", ":", "wrapped", "=", "wrappers", "[", "key", "]", "(", "vtkdataset", ")", "except", ":", "logging", ".", "warning", "(", "'VTK data type ({}) is not currently supported by vtki.'", ".", "format", "(", "key", ")", ")", "return", "vtkdataset", "# if not supported just passes the VTK data object", "return", "wrapped" ]
Start a fresh experiment from scratch . Returns self .
def fromScratch ( self ) : assert ( not os . path . lexists ( self . latestLink ) or os . path . islink ( self . latestLink ) ) self . rmR ( self . latestLink ) return self
10,184
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L112-L120
[ "def", "get_literal_kind_affinity", "(", "self", ",", "literal_kind", ")", ":", "if", "literal_kind", "==", "CursorKind", ".", "INTEGER_LITERAL", ":", "return", "[", "TypeKind", ".", "USHORT", ",", "TypeKind", ".", "UINT", ",", "TypeKind", ".", "ULONG", ",", "TypeKind", ".", "ULONGLONG", ",", "TypeKind", ".", "UINT128", ",", "TypeKind", ".", "SHORT", ",", "TypeKind", ".", "INT", ",", "TypeKind", ".", "LONG", ",", "TypeKind", ".", "LONGLONG", ",", "TypeKind", ".", "INT128", ",", "]", "elif", "literal_kind", "==", "CursorKind", ".", "STRING_LITERAL", ":", "return", "[", "TypeKind", ".", "CHAR16", ",", "TypeKind", ".", "CHAR32", ",", "TypeKind", ".", "CHAR_S", ",", "TypeKind", ".", "SCHAR", ",", "TypeKind", ".", "WCHAR", "]", "# DEBUG", "elif", "literal_kind", "==", "CursorKind", ".", "CHARACTER_LITERAL", ":", "return", "[", "TypeKind", ".", "CHAR_U", ",", "TypeKind", ".", "UCHAR", "]", "elif", "literal_kind", "==", "CursorKind", ".", "FLOATING_LITERAL", ":", "return", "[", "TypeKind", ".", "FLOAT", ",", "TypeKind", ".", "DOUBLE", ",", "TypeKind", ".", "LONGDOUBLE", "]", "elif", "literal_kind", "==", "CursorKind", ".", "IMAGINARY_LITERAL", ":", "return", "[", "]", "return", "[", "]" ]
Take a snapshot of the experiment . Returns self .
def snapshot ( self ) : nextSnapshotNum = self . nextSnapshotNum nextSnapshotPath = self . getFullPathToSnapshot ( nextSnapshotNum ) if os . path . lexists ( nextSnapshotPath ) : self . rmR ( nextSnapshotPath ) self . mkdirp ( os . path . join ( nextSnapshotPath , ".experiment" ) ) return self . dump ( nextSnapshotPath ) . __markLatest ( nextSnapshotNum )
10,185
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L136-L146
[ "def", "WriteFD", "(", "self", ",", "Channel", ",", "MessageBuffer", ")", ":", "try", ":", "res", "=", "self", ".", "__m_dllBasic", ".", "CAN_WriteFD", "(", "Channel", ",", "byref", "(", "MessageBuffer", ")", ")", "return", "TPCANStatus", "(", "res", ")", "except", ":", "logger", ".", "error", "(", "\"Exception on PCANBasic.WriteFD\"", ")", "raise" ]
Roll back the experiment to the given snapshot number . Returns self .
def rollback ( self , n = None ) : if n is None : if self . haveSnapshots : return self . fromSnapshot ( self . latestLink ) else : return self . fromScratch ( ) elif isinstance ( n , int ) : loadSnapshotPath = self . getFullPathToSnapshot ( n ) assert ( os . path . isdir ( loadSnapshotPath ) ) return self . __markLatest ( n ) . fromSnapshot ( loadSnapshotPath ) else : raise ValueError ( "n must be int, or None!" )
10,186
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L148-L161
[ "def", "get_unicode_property", "(", "value", ",", "prop", "=", "None", ",", "is_bytes", "=", "False", ")", ":", "if", "prop", "is", "not", "None", ":", "prop", "=", "unidata", ".", "unicode_alias", "[", "'_'", "]", ".", "get", "(", "prop", ",", "prop", ")", "try", ":", "if", "prop", "==", "'generalcategory'", ":", "return", "get_gc_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'script'", ":", "return", "get_script_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'scriptextensions'", ":", "return", "get_script_extension_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'block'", ":", "return", "get_block_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'binary'", ":", "return", "get_binary_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'bidiclass'", ":", "return", "get_bidi_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'bidipairedbrackettype'", ":", "return", "get_bidi_paired_bracket_type_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'age'", ":", "return", "get_age_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'eastasianwidth'", ":", "return", "get_east_asian_width_property", "(", "value", ",", "is_bytes", ")", "elif", "PY35", "and", "prop", "==", "'indicpositionalcategory'", ":", "return", "get_indic_positional_category_property", "(", "value", ",", "is_bytes", ")", "elif", "not", "PY35", "and", "prop", "==", "'indicmatracategory'", ":", "return", "get_indic_positional_category_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'indicsyllabiccategory'", ":", "return", "get_indic_syllabic_category_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'hangulsyllabletype'", ":", "return", "get_hangul_syllable_type_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'decompositiontype'", ":", "return", "get_decomposition_type_property", 
"(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'canonicalcombiningclass'", ":", "return", "get_canonical_combining_class_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'numerictype'", ":", "return", "get_numeric_type_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'numericvalue'", ":", "return", "get_numeric_value_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'joiningtype'", ":", "return", "get_joining_type_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'joininggroup'", ":", "return", "get_joining_group_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'graphemeclusterbreak'", ":", "return", "get_grapheme_cluster_break_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'linebreak'", ":", "return", "get_line_break_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'sentencebreak'", ":", "return", "get_sentence_break_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'wordbreak'", ":", "return", "get_word_break_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'nfcquickcheck'", ":", "return", "get_nfc_quick_check_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'nfdquickcheck'", ":", "return", "get_nfd_quick_check_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'nfkcquickcheck'", ":", "return", "get_nfkc_quick_check_property", "(", "value", ",", "is_bytes", ")", "elif", "prop", "==", "'nfkdquickcheck'", ":", "return", "get_nfkd_quick_check_property", "(", "value", ",", "is_bytes", ")", "elif", "PY37", "and", "prop", "==", "'verticalorientation'", ":", "return", "get_vertical_orientation_property", "(", "value", ",", "is_bytes", ")", "else", ":", "raise", "ValueError", "(", "'Invalid Unicode property!'", ")", "except", "Exception", ":", "raise", "ValueError", "(", "'Invalid Unicode property!'", ")", "try", ":", "return", 
"get_gc_property", "(", "value", ",", "is_bytes", ")", "except", "Exception", ":", "pass", "try", ":", "return", "get_script_extension_property", "(", "value", ",", "is_bytes", ")", "except", "Exception", ":", "pass", "try", ":", "return", "get_block_property", "(", "value", ",", "is_bytes", ")", "except", "Exception", ":", "pass", "try", ":", "return", "get_binary_property", "(", "value", ",", "is_bytes", ")", "except", "Exception", ":", "pass", "try", ":", "return", "get_is_property", "(", "value", ",", "is_bytes", ")", "except", "Exception", ":", "pass", "try", ":", "return", "get_in_property", "(", "value", ",", "is_bytes", ")", "except", "Exception", ":", "pass", "raise", "ValueError", "(", "'Invalid Unicode property!'", ")" ]
Get the full path to snapshot n .
def getFullPathToSnapshot ( self , n ) : return os . path . join ( self . snapDir , str ( n ) )
10,187
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L201-L203
[ "def", "_valid_config", "(", "self", ",", "settings", ")", ":", "if", "(", "(", "int", "(", "settings", "[", "'environment_temp'", "]", ")", ">", "self", ".", "MAX_BOUND_TEMP", "or", "int", "(", "settings", "[", "'environment_temp'", "]", ")", "<", "self", ".", "MIN_BOUND_TEMP", ")", "or", "(", "int", "(", "settings", "[", "'bean_temp'", "]", ")", ">", "self", ".", "MAX_BOUND_TEMP", "or", "int", "(", "settings", "[", "'bean_temp'", "]", ")", "<", "self", ".", "MIN_BOUND_TEMP", ")", ")", ":", "self", ".", "_log", ".", "error", "(", "'Temperatures are outside of bounds'", ")", "return", "False", "binary", "=", "[", "'drum_motor'", ",", "'chaff_tray'", ",", "'solenoid'", ",", "'cooling_motor'", "]", "for", "item", "in", "binary", ":", "if", "int", "(", "settings", ".", "get", "(", "item", ")", ")", "not", "in", "[", "0", ",", "1", "]", ":", "self", ".", "_log", ".", "error", "(", "'Settings show invalid values'", ")", "return", "False", "return", "True" ]
Return the directory names to preserve under the LastK purge strategy .
def strategyLastK ( kls , n , k = 10 ) : return set ( map ( str , filter ( lambda x : x >= 0 , range ( n , n - k , - 1 ) ) ) )
10,188
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L215-L217
[ "def", "_start_console", "(", "self", ")", ":", "self", ".", "_remote_pipe", "=", "yield", "from", "asyncio_open_serial", "(", "self", ".", "_get_pipe_name", "(", ")", ")", "server", "=", "AsyncioTelnetServer", "(", "reader", "=", "self", ".", "_remote_pipe", ",", "writer", "=", "self", ".", "_remote_pipe", ",", "binary", "=", "True", ",", "echo", "=", "True", ")", "self", ".", "_telnet_server", "=", "yield", "from", "asyncio", ".", "start_server", "(", "server", ".", "run", ",", "self", ".", "_manager", ".", "port_manager", ".", "console_host", ",", "self", ".", "console", ")" ]
Return the directory names to preserve under the KLogN purge strategy .
def strategyKLogN ( kls , n , k = 4 ) : assert ( k > 1 ) s = set ( [ n ] ) i = 0 while k ** i <= n : s . update ( range ( n , n - k * k ** i , - k ** i ) ) i += 1 n -= n % k ** i return set ( map ( str , filter ( lambda x : x >= 0 , s ) ) )
10,189
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L220-L231
[ "def", "_start_console", "(", "self", ")", ":", "self", ".", "_remote_pipe", "=", "yield", "from", "asyncio_open_serial", "(", "self", ".", "_get_pipe_name", "(", ")", ")", "server", "=", "AsyncioTelnetServer", "(", "reader", "=", "self", ".", "_remote_pipe", ",", "writer", "=", "self", ".", "_remote_pipe", ",", "binary", "=", "True", ",", "echo", "=", "True", ")", "self", ".", "_telnet_server", "=", "yield", "from", "asyncio", ".", "start_server", "(", "server", ".", "run", ",", "self", ".", "_manager", ".", "port_manager", ".", "console_host", ",", "self", ".", "console", ")" ]
Return the set of snapshot directories and non - snapshot directories under the given path .
def listSnapshotDir ( kls , path ) : snapshotSet = set ( ) nonsnapshotSet = set ( ) try : entryList = os . listdir ( path ) for e in entryList : if kls . isFilenameInteger ( e ) : snapshotSet . add ( e ) else : nonsnapshotSet . add ( e ) except FileNotFoundError : pass finally : return snapshotSet , nonsnapshotSet
10,190
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L249-L261
[ "def", "groff2man", "(", "data", ")", ":", "width", "=", "get_width", "(", ")", "cmd", "=", "'groff -t -Tascii -m man -rLL=%dn -rLT=%dn'", "%", "(", "width", ",", "width", ")", "handle", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "shell", "=", "True", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "man_text", ",", "stderr", "=", "handle", ".", "communicate", "(", "data", ")", "return", "man_text" ]
rm - R path . Deletes but does not recurse into symlinks . If the path does not exist silently return .
def rmR ( kls , path ) : if os . path . islink ( path ) or os . path . isfile ( path ) : os . unlink ( path ) elif os . path . isdir ( path ) : walker = os . walk ( path , topdown = False , followlinks = False ) for dirpath , dirnames , filenames in walker : for f in filenames : os . unlink ( os . path . join ( dirpath , f ) ) for d in dirnames : os . rmdir ( os . path . join ( dirpath , d ) ) os . rmdir ( path )
10,191
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L264-L276
[ "def", "register", "(", "self", ")", ":", "self", ".", "_queue", ".", "put", "(", "hello_packet", "(", "socket", ".", "gethostname", "(", ")", ",", "mac", "(", ")", ",", "__version__", ")", ")", "self", ".", "_queue", ".", "put", "(", "request_packet", "(", "MSG_SERVER_SETTINGS", ")", ")", "self", ".", "_queue", ".", "put", "(", "request_packet", "(", "MSG_SAMPLE_FORMAT", ")", ")", "self", ".", "_queue", ".", "put", "(", "request_packet", "(", "MSG_HEADER", ")", ")" ]
Same syntax as os . symlink except that the new link called name will first be created with the name and target name . ATOMIC - > target then be atomically renamed to name - > target thus overwriting any previous symlink there . If a filesystem entity called name . ATOMIC already exists it will be forcibly removed .
def atomicSymlink ( kls , target , name ) : linkAtomicName = name + ".ATOMIC" linkFinalName = name linkTarget = target if os . path . lexists ( linkAtomicName ) : kls . rmR ( linkAtomicName ) os . symlink ( linkTarget , linkAtomicName ) ################################################ ######## FILESYSTEM LINEARIZATION POINT ######## ######## vvvvvvvvvvvvvvvvvvvvvvvvvvvvvv ######## os . rename ( linkAtomicName , linkFinalName )
10,192
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/exp/experiment.py#L279-L304
[ "def", "setConf", "(", "self", ",", "key", ",", "value", ")", ":", "self", ".", "sparkSession", ".", "conf", ".", "set", "(", "key", ",", "value", ")" ]
Compensate temperature .
def _compensate_temperature ( self , adc_t ) : var_1 = ( ( adc_t / 16384.0 - self . _calibration_t [ 0 ] / 1024.0 ) * self . _calibration_t [ 1 ] ) var_2 = ( ( adc_t / 131072.0 - self . _calibration_t [ 0 ] / 8192.0 ) * ( adc_t / 131072.0 - self . _calibration_t [ 0 ] / 8192.0 ) * self . _calibration_t [ 2 ] ) self . _temp_fine = var_1 + var_2 if self . _delta_temp != 0. : # temperature correction for self heating temp = self . _temp_fine / 5120.0 + self . _delta_temp self . _temp_fine = temp * 5120.0 else : temp = self . _temp_fine / 5120.0 return temp
10,193
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L61-L79
[ "def", "_ar_matrix", "(", "self", ")", ":", "Y", "=", "np", ".", "array", "(", "self", ".", "data", "[", "self", ".", "max_lag", ":", "self", ".", "data", ".", "shape", "[", "0", "]", "]", ")", "X", "=", "self", ".", "data", "[", "(", "self", ".", "max_lag", "-", "1", ")", ":", "-", "1", "]", "if", "self", ".", "ar", "!=", "0", ":", "for", "i", "in", "range", "(", "1", ",", "self", ".", "ar", ")", ":", "X", "=", "np", ".", "vstack", "(", "(", "X", ",", "self", ".", "data", "[", "(", "self", ".", "max_lag", "-", "i", "-", "1", ")", ":", "-", "i", "-", "1", "]", ")", ")", "return", "X" ]
Compensate pressure .
def _compensate_pressure ( self , adc_p ) : var_1 = ( self . _temp_fine / 2.0 ) - 64000.0 var_2 = ( ( var_1 / 4.0 ) * ( var_1 / 4.0 ) ) / 2048 var_2 *= self . _calibration_p [ 5 ] var_2 += ( ( var_1 * self . _calibration_p [ 4 ] ) * 2.0 ) var_2 = ( var_2 / 4.0 ) + ( self . _calibration_p [ 3 ] * 65536.0 ) var_1 = ( ( ( self . _calibration_p [ 2 ] * ( ( ( var_1 / 4.0 ) * ( var_1 / 4.0 ) ) / 8192 ) ) / 8 ) + ( ( self . _calibration_p [ 1 ] * var_1 ) / 2.0 ) ) var_1 /= 262144 var_1 = ( ( 32768 + var_1 ) * self . _calibration_p [ 0 ] ) / 32768 if var_1 == 0 : return 0 pressure = ( ( 1048576 - adc_p ) - ( var_2 / 4096 ) ) * 3125 if pressure < 0x80000000 : pressure = ( pressure * 2.0 ) / var_1 else : pressure = ( pressure / var_1 ) * 2 var_1 = ( self . _calibration_p [ 8 ] * ( ( ( pressure / 8.0 ) * ( pressure / 8.0 ) ) / 8192.0 ) ) / 4096 var_2 = ( ( pressure / 4.0 ) * self . _calibration_p [ 7 ] ) / 8192.0 pressure += ( ( var_1 + var_2 + self . _calibration_p [ 6 ] ) / 16.0 ) return pressure / 100
10,194
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L81-L113
[ "def", "cp_cropduster_image", "(", "self", ",", "the_image_path", ",", "del_after_upload", "=", "False", ",", "overwrite", "=", "False", ",", "invalidate", "=", "False", ")", ":", "local_file", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_ROOT", ",", "the_image_path", ")", "# only try to upload things if the origin cropduster file exists (so it is not already uploaded to the CDN)", "if", "os", ".", "path", ".", "exists", "(", "local_file", ")", ":", "the_image_crops_path", "=", "os", ".", "path", ".", "splitext", "(", "the_image_path", ")", "[", "0", "]", "the_image_crops_path_full_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "MEDIA_ROOT", ",", "the_image_crops_path", ")", "self", ".", "cp", "(", "local_path", "=", "local_file", ",", "target_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "S3_ROOT_BASE", ",", "the_image_path", ")", ",", "del_after_upload", "=", "del_after_upload", ",", "overwrite", "=", "overwrite", ",", "invalidate", "=", "invalidate", ",", ")", "self", ".", "cp", "(", "local_path", "=", "the_image_crops_path_full_path", "+", "\"/*\"", ",", "target_path", "=", "os", ".", "path", ".", "join", "(", "settings", ".", "S3_ROOT_BASE", ",", "the_image_crops_path", ")", ",", "del_after_upload", "=", "del_after_upload", ",", "overwrite", "=", "overwrite", ",", "invalidate", "=", "invalidate", ",", ")" ]
Compensate humidity .
def _compensate_humidity ( self , adc_h ) : var_h = self . _temp_fine - 76800.0 if var_h == 0 : return 0 var_h = ( ( adc_h - ( self . _calibration_h [ 3 ] * 64.0 + self . _calibration_h [ 4 ] / 16384.0 * var_h ) ) * ( self . _calibration_h [ 1 ] / 65536.0 * ( 1.0 + self . _calibration_h [ 5 ] / 67108864.0 * var_h * ( 1.0 + self . _calibration_h [ 2 ] / 67108864.0 * var_h ) ) ) ) var_h *= 1.0 - self . _calibration_h [ 0 ] * var_h / 524288.0 if var_h > 100.0 : var_h = 100.0 elif var_h < 0.0 : var_h = 0.0 return var_h
10,195
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L115-L138
[ "def", "delete_logs", "(", "room", ")", ":", "from", "indico_chat", ".", "plugin", "import", "ChatPlugin", "base_url", "=", "ChatPlugin", ".", "settings", ".", "get", "(", "'log_url'", ")", "if", "not", "base_url", "or", "room", ".", "custom_server", ":", "return", "try", ":", "response", "=", "requests", ".", "get", "(", "posixpath", ".", "join", "(", "base_url", ",", "'delete'", ")", ",", "params", "=", "{", "'cr'", ":", "room", ".", "jid", "}", ")", ".", "json", "(", ")", "except", "(", "RequestException", ",", "ValueError", ")", ":", "current_plugin", ".", "logger", ".", "exception", "(", "'Could not delete logs for %s'", ",", "room", ".", "jid", ")", "return", "if", "not", "response", ".", "get", "(", "'success'", ")", ":", "current_plugin", ".", "logger", ".", "warning", "(", "'Could not delete logs for %s: %s'", ",", "room", ".", "jid", ",", "response", ".", "get", "(", "'error'", ")", ")" ]
Take a forced measurement .
def _take_forced_measurement ( self ) : # set to forced mode, i.e. "take next measurement" self . _bus . write_byte_data ( self . _i2c_add , 0xF4 , self . ctrl_meas_reg ) while self . _bus . read_byte_data ( self . _i2c_add , 0xF3 ) & 0x08 : sleep ( 0.005 )
10,196
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L202-L213
[ "def", "create", "(", "DomainName", ",", "ElasticsearchClusterConfig", "=", "None", ",", "EBSOptions", "=", "None", ",", "AccessPolicies", "=", "None", ",", "SnapshotOptions", "=", "None", ",", "AdvancedOptions", "=", "None", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ",", "ElasticsearchVersion", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "kwargs", "=", "{", "}", "for", "k", "in", "(", "'ElasticsearchClusterConfig'", ",", "'EBSOptions'", ",", "'AccessPolicies'", ",", "'SnapshotOptions'", ",", "'AdvancedOptions'", ",", "'ElasticsearchVersion'", ")", ":", "if", "locals", "(", ")", "[", "k", "]", "is", "not", "None", ":", "val", "=", "locals", "(", ")", "[", "k", "]", "if", "isinstance", "(", "val", ",", "six", ".", "string_types", ")", ":", "try", ":", "val", "=", "salt", ".", "utils", ".", "json", ".", "loads", "(", "val", ")", "except", "ValueError", "as", "e", ":", "return", "{", "'updated'", ":", "False", ",", "'error'", ":", "'Error parsing {0}: {1}'", ".", "format", "(", "k", ",", "e", ".", "message", ")", "}", "kwargs", "[", "k", "]", "=", "val", "if", "'AccessPolicies'", "in", "kwargs", ":", "kwargs", "[", "'AccessPolicies'", "]", "=", "salt", ".", "utils", ".", "json", ".", "dumps", "(", "kwargs", "[", "'AccessPolicies'", "]", ")", "if", "'ElasticsearchVersion'", "in", "kwargs", ":", "kwargs", "[", "'ElasticsearchVersion'", "]", "=", "six", ".", "text_type", "(", "kwargs", "[", "'ElasticsearchVersion'", "]", ")", "domain", "=", "conn", ".", "create_elasticsearch_domain", "(", "DomainName", "=", "DomainName", ",", "*", "*", "kwargs", ")", "if", "domain", "and", "'DomainStatus'", "in", "domain", ":", "return", "{", "'created'", ":", "True", "}", "else", ":", "log", ".", "warning", "(", "'Domain was not created'", ")", "return", "{", "'created'", ":", "False", 
"}", "except", "ClientError", "as", "e", ":", "return", "{", "'created'", ":", "False", ",", "'error'", ":", "__utils__", "[", "'boto3.get_error'", "]", "(", "e", ")", "}" ]
Read raw data and update compensated variables .
def update ( self , first_reading = False ) : try : if first_reading or not self . _ok : self . _bus . write_byte_data ( self . _i2c_add , 0xF2 , self . ctrl_hum_reg ) self . _bus . write_byte_data ( self . _i2c_add , 0xF5 , self . config_reg ) self . _bus . write_byte_data ( self . _i2c_add , 0xF4 , self . ctrl_meas_reg ) self . _populate_calibration_data ( ) if self . mode == 2 : # MODE_FORCED self . _take_forced_measurement ( ) data = [ ] for i in range ( 0xF7 , 0xF7 + 8 ) : data . append ( self . _bus . read_byte_data ( self . _i2c_add , i ) ) except OSError as exc : self . log_error ( "Bad update: %s" , exc ) self . _ok = False return pres_raw = ( data [ 0 ] << 12 ) | ( data [ 1 ] << 4 ) | ( data [ 2 ] >> 4 ) temp_raw = ( data [ 3 ] << 12 ) | ( data [ 4 ] << 4 ) | ( data [ 5 ] >> 4 ) hum_raw = ( data [ 6 ] << 8 ) | data [ 7 ] self . _ok = False temperature = self . _compensate_temperature ( temp_raw ) if ( temperature >= - 20 ) and ( temperature < 80 ) : self . _temperature = temperature self . _ok = True if self . _with_humidity : humidity = self . _compensate_humidity ( hum_raw ) if ( humidity >= 0 ) and ( humidity <= 100 ) : self . _humidity = humidity else : self . _ok = False if self . _with_pressure : pressure = self . _compensate_pressure ( pres_raw ) if pressure > 100 : self . _pressure = pressure else : self . _ok = False
10,197
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/bme280.py#L215-L257
[ "def", "handle_not_found", "(", "exception", ",", "*", "*", "extra", ")", ":", "assert", "isinstance", "(", "exception", ",", "NotFound", ")", "page", "=", "Page", ".", "query", ".", "filter", "(", "db", ".", "or_", "(", "Page", ".", "url", "==", "request", ".", "path", ",", "Page", ".", "url", "==", "request", ".", "path", "+", "\"/\"", ")", ")", ".", "first", "(", ")", "if", "page", ":", "_add_url_rule", "(", "page", ".", "url", ")", "return", "render_template", "(", "[", "page", ".", "template_name", ",", "current_app", ".", "config", "[", "'PAGES_DEFAULT_TEMPLATE'", "]", "]", ",", "page", "=", "page", ")", "elif", "'wrapped'", "in", "extra", ":", "return", "extra", "[", "'wrapped'", "]", "(", "exception", ")", "else", ":", "return", "exception" ]
Append a PileupElement to this Pileup . If an identical PileupElement is already part of this Pileup do nothing .
def append ( self , element ) : assert element . locus == self . locus , ( "Element locus (%s) != Pileup locus (%s)" % ( element . locus , self . locus ) ) self . elements [ element ] = None
10,198
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup.py#L55-L63
[ "def", "_login", "(", "self", ",", "username", ",", "password", ")", ":", "data", "=", "{", "'username'", ":", "username", ",", "'password'", ":", "password", ",", "'grant_type'", ":", "'password'", "}", "r", "=", "self", ".", "spark_api", ".", "oauth", ".", "token", ".", "POST", "(", "auth", "=", "(", "'spark'", ",", "'spark'", ")", ",", "data", "=", "data", ",", "timeout", "=", "self", ".", "timeout", ")", "self", ".", "_check_error", "(", "r", ")", "return", "r", ".", "json", "(", ")", "[", "'access_token'", "]" ]
Add all pileup elements from other into self .
def update ( self , other ) : assert self . locus == other . locus self . elements . update ( other . elements )
10,199
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup.py#L65-L70
[ "async", "def", "set_default_min_hwe_kernel", "(", "cls", ",", "version", ":", "typing", ".", "Optional", "[", "str", "]", ")", ":", "await", "cls", ".", "set_config", "(", "\"default_min_hwe_kernel\"", ",", "\"\"", "if", "version", "is", "None", "else", "version", ")" ]