Dataset schema: query (string, 5 to 1.23k chars); positive (string, 53 to 15.2k chars); id_ (int64, 0 to 252k); task_name (string, 87 to 242 chars); negative (list, 20 to 553 items).
get the value of a given socket option
def getsockopt(self, level, optname, *args, **kwargs):
    return self._sock.getsockopt(level, optname, *args, **kwargs)
7,000
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/sockets.py#L219-L238
[ "def", "same_col", "(", "c", ")", ":", "return", "same_table", "(", "c", ")", "and", "all", "(", "is_col_aligned", "(", "_to_span", "(", "c", "[", "i", "]", ")", ".", "sentence", ",", "_to_span", "(", "c", "[", "0", "]", ")", ".", "sentence", ")", "for", "i", "in", "range", "(", "len", "(", "c", ")", ")", ")" ]
create a file-like object that wraps the socket
def makefile(self, mode='r', bufsize=-1):
    f = SocketFile(self._sock, mode)
    f._sock.settimeout(self.gettimeout())
    return f
7,001
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/sockets.py#L259-L277
[ "def", "store_result", "(", "self", ",", "message", ",", "result", ":", "Result", ",", "ttl", ":", "int", ")", "->", "None", ":", "message_key", "=", "self", ".", "build_message_key", "(", "message", ")", "return", "self", ".", "_store", "(", "message_key", ",", "result", ",", "ttl", ")" ]
receive data on a socket that isn't necessarily a 1-to-1 connection
def recvfrom(self, bufsize, flags=0):
    with self._registered('re'):
        while 1:
            if self._closed:
                raise socket.error(errno.EBADF, "Bad file descriptor")
            try:
                return self._sock.recvfrom(bufsize, flags)
            except socket.error as exc:
                if not self._blocking or exc[0] not in _BLOCKING_OP:
                    raise
                sys.exc_clear()  # Python 2 only
                if self._readable.wait(self.gettimeout()):
                    raise socket.timeout("timed out")
                if scheduler.state.interrupted:
                    raise IOError(errno.EINTR, "interrupted system call")
7,002
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/sockets.py#L346-L377
[ "def", "retrieve_log_trace", "(", "self", ",", "filename", "=", "None", ",", "dir", "=", "None", ")", ":", "if", "hasattr", "(", "self", ",", "\"applicationLogTrace\"", ")", "and", "self", ".", "applicationLogTrace", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"Retrieving application logs from: \"", "+", "self", ".", "applicationLogTrace", ")", "if", "not", "filename", ":", "filename", "=", "_file_name", "(", "'job'", ",", "self", ".", "id", ",", "'.tar.gz'", ")", "return", "self", ".", "rest_client", ".", "_retrieve_file", "(", "self", ".", "applicationLogTrace", ",", "filename", ",", "dir", ",", "'application/x-compressed'", ")", "else", ":", "return", "None" ]
send data over the socket connection
def send(self, data, flags=0):
    with self._registered('we'):
        while 1:
            try:
                return self._sock.send(data)
            except socket.error as exc:
                if exc[0] not in _CANT_SEND or not self._blocking:
                    raise
                if self._writable.wait(self.gettimeout()):
                    raise socket.timeout("timed out")
                if scheduler.state.interrupted:
                    raise IOError(errno.EINTR, "interrupted system call")
7,003
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/sockets.py#L415-L441
[ "def", "flatten_container", "(", "self", ",", "container", ")", ":", "for", "names", "in", "ARG_MAP", ".", "values", "(", ")", ":", "if", "names", "[", "TransformationTypes", ".", "CHRONOS", ".", "value", "]", "[", "'name'", "]", "and", "'.'", "in", "names", "[", "TransformationTypes", ".", "CHRONOS", ".", "value", "]", "[", "'name'", "]", ":", "chronos_dotted_name", "=", "names", "[", "TransformationTypes", ".", "CHRONOS", ".", "value", "]", "[", "'name'", "]", "parts", "=", "chronos_dotted_name", ".", "split", "(", "'.'", ")", "if", "parts", "[", "-", "2", "]", "==", "'parameters'", ":", "# Special lookup for docker parameters", "common_type", "=", "names", "[", "TransformationTypes", ".", "CHRONOS", ".", "value", "]", ".", "get", "(", "'type'", ")", "result", "=", "self", ".", "_lookup_parameter", "(", "container", ",", "parts", "[", "-", "1", "]", ",", "common_type", ")", "if", "result", ":", "container", "[", "chronos_dotted_name", "]", "=", "result", "else", ":", "result", "=", "lookup_nested_dict", "(", "container", ",", "*", "parts", ")", "if", "result", ":", "container", "[", "chronos_dotted_name", "]", "=", "result", "return", "container" ]
send data over the connection and keep sending until it all goes
def sendall(self, data, flags=0):
    sent = self.send(data, flags)
    while sent < len(data):
        sent += self.send(data[sent:], flags)
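A short usage sketch of the same resend loop using the standard library's socket API (the endpoint is hypothetical): a plain send may transmit only a prefix of the buffer, which is exactly why sendall loops.

import socket

s = socket.create_connection(("example.com", 80))  # hypothetical endpoint
payload = b"GET / HTTP/1.0\r\nHost: example.com\r\n\r\n"
sent = s.send(payload)            # may send fewer bytes than len(payload)
while sent < len(payload):        # resend the remainder until done
    sent += s.send(payload[sent:])
s.close()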
7,004
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/sockets.py#L443-L457
[ "def", "ParseFileObject", "(", "self", ",", "parser_mediator", ",", "file_object", ")", ":", "regf_file", "=", "pyregf", ".", "file", "(", ")", "# pylint: disable=no-member", "try", ":", "regf_file", ".", "open_file_object", "(", "file_object", ")", "except", "IOError", ":", "# The error is currently ignored -> see TODO above related to the", "# fixing of handling multiple parsers for the same file format.", "return", "root_key", "=", "regf_file", ".", "get_root_key", "(", ")", "if", "root_key", "is", "None", ":", "regf_file", ".", "close", "(", ")", "return", "root_file_key", "=", "root_key", ".", "get_sub_key_by_path", "(", "self", ".", "_AMCACHE_ROOT_FILE_KEY", ")", "if", "root_file_key", "is", "None", ":", "regf_file", ".", "close", "(", ")", "return", "for", "volume_key", "in", "root_file_key", ".", "sub_keys", ":", "for", "am_entry", "in", "volume_key", ".", "sub_keys", ":", "self", ".", "_ProcessAMCacheFileKey", "(", "am_entry", ",", "parser_mediator", ")", "root_program_key", "=", "root_key", ".", "get_sub_key_by_path", "(", "self", ".", "_AMCACHE_ROOT_PROGRAM_KEY", ")", "if", "root_program_key", "is", "None", ":", "regf_file", ".", "close", "(", ")", "return", "for", "am_entry", "in", "root_program_key", ".", "sub_keys", ":", "self", ".", "_ProcessAMCacheProgramKey", "(", "am_entry", ",", "parser_mediator", ")", "regf_file", ".", "close", "(", ")" ]
set the value of a given socket option
def setsockopt(self, level, optname, value):
    return self._sock.setsockopt(level, optname, value)
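For context, a minimal sketch of the standard-library calls these two wrappers delegate to (option constants from the socket module):

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)      # allow quick rebinds
print(s.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR))  # typically prints 1
s.close()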
7,005
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/sockets.py#L502-L516
[ "def", "same_col", "(", "c", ")", ":", "return", "same_table", "(", "c", ")", "and", "all", "(", "is_col_aligned", "(", "_to_span", "(", "c", "[", "i", "]", ")", ".", "sentence", ",", "_to_span", "(", "c", "[", "0", "]", ")", ".", "sentence", ")", "for", "i", "in", "range", "(", "len", "(", "c", ")", ")", ")" ]
set the timeout for this specific socket
def settimeout(self, timeout):
    if timeout is not None:
        timeout = float(timeout)
    self._timeout = timeout
7,006
https://github.com/teepark/greenhouse/blob/8fd1be4f5443ba090346b5ec82fdbeb0a060d956/greenhouse/io/sockets.py#L528-L538
[ "def", "tag_pos_volume", "(", "line", ")", ":", "def", "tagger", "(", "match", ")", ":", "groups", "=", "match", ".", "groupdict", "(", ")", "try", ":", "year", "=", "match", ".", "group", "(", "'year'", ")", "except", "IndexError", ":", "# Extract year from volume name", "# which should always include the year", "g", "=", "re", ".", "search", "(", "re_pos_year_num", ",", "match", ".", "group", "(", "'volume_num'", ")", ",", "re", ".", "UNICODE", ")", "year", "=", "g", ".", "group", "(", "0", ")", "if", "year", ":", "groups", "[", "'year'", "]", "=", "' <cds.YR>(%s)</cds.YR>'", "%", "year", ".", "strip", "(", ")", ".", "strip", "(", "'()'", ")", "else", ":", "groups", "[", "'year'", "]", "=", "''", "return", "'<cds.JOURNAL>PoS</cds.JOURNAL>'", "' <cds.VOL>%(volume_name)s%(volume_num)s</cds.VOL>'", "'%(year)s'", "' <cds.PG>%(page)s</cds.PG>'", "%", "groups", "for", "p", "in", "re_pos", ":", "line", "=", "p", ".", "sub", "(", "tagger", ",", "line", ")", "return", "line" ]
find current version information
def get_version():
    if not INSTALLED:
        try:
            with open('version.txt', 'r') as v_fh:
                return v_fh.read()
        except Exception:
            warnings.warn(
                'Unable to resolve package version until installed',
                UserWarning)
            return '0.0.0'  # can't parse version without stuff installed
    return p_version.get_version(HERE)
7,007
https://github.com/EVEprosper/ProsperCommon/blob/bcada3b25420099e1f204db8d55eb268e7b4dc27/prosper/common/_version.py#L13-L31
[ "def", "swo_enable", "(", "self", ",", "cpu_speed", ",", "swo_speed", "=", "9600", ",", "port_mask", "=", "0x01", ")", ":", "if", "self", ".", "swo_enabled", "(", ")", ":", "self", ".", "swo_stop", "(", ")", "res", "=", "self", ".", "_dll", ".", "JLINKARM_SWO_EnableTarget", "(", "cpu_speed", ",", "swo_speed", ",", "enums", ".", "JLinkSWOInterfaces", ".", "UART", ",", "port_mask", ")", "if", "res", "!=", "0", ":", "raise", "errors", ".", "JLinkException", "(", "res", ")", "self", ".", "_swo_enabled", "=", "True", "return", "None" ]
Adds a list of term ids as a new span in the references
def add_span(self, term_span):
    new_span = Cspan()
    new_span.create_from_ids(term_span)
    self.node.append(new_span.get_node())
7,008
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/coreference_data.py#L78-L86
[ "def", "json_parse", "(", "self", ",", "response", ")", ":", "try", ":", "data", "=", "response", ".", "json", "(", ")", "except", "ValueError", ":", "data", "=", "{", "'meta'", ":", "{", "'status'", ":", "500", ",", "'msg'", ":", "'Server Error'", "}", ",", "'response'", ":", "{", "\"error\"", ":", "\"Malformed JSON or HTML was returned.\"", "}", "}", "# We only really care about the response if we succeed", "# and the error if we fail", "if", "200", "<=", "data", "[", "'meta'", "]", "[", "'status'", "]", "<=", "399", ":", "return", "data", "[", "'response'", "]", "else", ":", "return", "data" ]
Removes a specific span from the coref object
def remove_span(self, span):
    this_node = span.get_node()
    self.node.remove(this_node)
7,009
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/coreference_data.py#L96-L101
[ "def", "_sync_last_sale_prices", "(", "self", ",", "dt", "=", "None", ")", ":", "if", "dt", "is", "None", ":", "dt", "=", "self", ".", "datetime", "if", "dt", "!=", "self", ".", "_last_sync_time", ":", "self", ".", "metrics_tracker", ".", "sync_last_sale_prices", "(", "dt", ",", "self", ".", "data_portal", ",", ")", "self", ".", "_last_sync_time", "=", "dt" ]
Converts the coreference layer to KAF
def to_kaf(self):
    if self.type == 'NAF':
        for node_coref in self.__get_corefs_nodes():
            node_coref.set('coid', node_coref.get('id'))
            del node_coref.attrib['id']
7,010
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/coreference_data.py#L194-L201
[ "def", "_get_env_var", "(", "env_var_name", ")", ":", "if", "env_var_name", "in", "os", ".", "environ", ":", "return", "os", ".", "environ", "[", "env_var_name", "]", "fname", "=", "os", ".", "path", ".", "join", "(", "get_home", "(", ")", ",", "'.tangorc'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "fname", ")", ":", "if", "os", ".", "name", "==", "'posix'", ":", "fname", "=", "\"/etc/tangorc\"", "if", "not", "os", ".", "path", ".", "exists", "(", "fname", ")", ":", "return", "None", "for", "line", "in", "open", "(", "fname", ")", ":", "strippedline", "=", "line", ".", "split", "(", "'#'", ",", "1", ")", "[", "0", "]", ".", "strip", "(", ")", "if", "not", "strippedline", ":", "# empty line", "continue", "tup", "=", "strippedline", ".", "split", "(", "'='", ",", "1", ")", "if", "len", "(", "tup", ")", "!=", "2", ":", "# illegal line!", "continue", "key", ",", "val", "=", "map", "(", "str", ".", "strip", ",", "tup", ")", "if", "key", "==", "env_var_name", ":", "return", "val" ]
Converts the coreference layer to NAF
def to_naf(self):
    if self.type == 'KAF':
        for node_coref in self.__get_corefs_nodes():
            node_coref.set('id', node_coref.get('coid'))
            del node_coref.attrib['coid']
7,011
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/coreference_data.py#L203-L210
[ "def", "minutes_for_sessions_in_range", "(", "self", ",", "start_session_label", ",", "end_session_label", ")", ":", "first_minute", ",", "_", "=", "self", ".", "open_and_close_for_session", "(", "start_session_label", ")", "_", ",", "last_minute", "=", "self", ".", "open_and_close_for_session", "(", "end_session_label", ")", "return", "self", ".", "minutes_in_range", "(", "first_minute", ",", "last_minute", ")" ]
Removes the coreference with specific identifier
def remove_coreference(self, coid):
    for this_node in self.node.findall('coref'):
        if this_node.get('id') == coid:
            self.node.remove(this_node)
            break
7,012
https://github.com/cltl/KafNafParserPy/blob/9bc32e803c176404b255ba317479b8780ed5f569/KafNafParserPy/coreference_data.py#L213-L222
[ "def", "_rescan", "(", "self", ",", "skip_to_end", "=", "True", ")", ":", "# Get listing of matching files.", "paths", "=", "[", "]", "for", "single_glob", "in", "self", ".", "_globspec", ":", "paths", ".", "extend", "(", "glob", ".", "glob", "(", "single_glob", ")", ")", "# Remove files that don't appear in the new list.", "for", "path", "in", "self", ".", "_tailedfiles", ".", "keys", "(", ")", ":", "if", "path", "not", "in", "paths", ":", "self", ".", "_tailedfiles", "[", "path", "]", ".", "_close", "(", ")", "del", "self", ".", "_tailedfiles", "[", "path", "]", "# Add any files we don't have open yet.", "for", "path", "in", "paths", ":", "try", ":", "# If the file has been rotated, reopen it.", "if", "self", ".", "_tailedfiles", "[", "path", "]", ".", "hasBeenRotated", "(", ")", ":", "# If it can't be reopened, close it.", "if", "not", "self", ".", "_tailedfiles", "[", "path", "]", ".", "reopen", "(", ")", ":", "del", "self", ".", "_tailedfiles", "[", "path", "]", "except", "KeyError", ":", "# Open a file that we haven't seen yet.", "self", ".", "_tailedfiles", "[", "path", "]", "=", "TailedFile", "(", "path", ",", "skip_to_end", "=", "skip_to_end", ",", "offset", "=", "self", ".", "_offsets", ".", "get", "(", "path", ",", "None", ")", ")" ]
exports vesseltree to esofspy format
def vt2esofspy(vesseltree, outputfilename="tracer.txt", axisorder=[0, 1, 2]):
    if (type(vesseltree) == str) and os.path.isfile(vesseltree):
        import io3d
        vt = io3d.misc.obj_from_file(vesseltree)
    else:
        vt = vesseltree
    print(vt['general'])
    print(vt.keys())
    vtgm = vt['graph']['microstructure']
    lines = []
    vs = vt['general']['voxel_size_mm']
    sh = vt['general']['shape_px']
    # switch axis
    ax = axisorder
    lines.append("#Tracer+\n")
    lines.append("#voxelsize mm %f %f %f\n" % (vs[ax[0]], vs[ax[1]], vs[ax[2]]))
    lines.append("#shape %i %i %i\n" % (sh[ax[0]], sh[ax[1]], sh[ax[2]]))
    lines.append(str(len(vtgm) * 2) + "\n")
    i = 1
    for id in vtgm:
        # edge['']
        try:
            nda = vtgm[id]['nodeA_ZYX']
            ndb = vtgm[id]['nodeB_ZYX']
            lines.append("%i\t%i\t%i\t%i\n" % (nda[ax[0]], nda[ax[1]], nda[ax[2]], i))
            lines.append("%i\t%i\t%i\t%i\n" % (ndb[ax[0]], ndb[ax[1]], ndb[ax[2]], i))
            i += 1
        except:
            pass
    lines.append("%i\t%i\t%i\t%i" % (0, 0, 0, 0))
    lines[3] = str(i - 1) + "\n"
    with open(outputfilename, 'wt') as f:
        f.writelines(lines)
7,013
https://github.com/mjirik/imtools/blob/eb29fa59df0e0684d8334eb3bc5ef36ea46d1d3a/imtools/vesseltree_export.py#L22-L68
[ "def", "add_bonus", "(", "worker_dict", ")", ":", "try", ":", "unique_id", "=", "'{}:{}'", ".", "format", "(", "worker_dict", "[", "'workerId'", "]", ",", "worker_dict", "[", "'assignmentId'", "]", ")", "worker", "=", "Participant", ".", "query", ".", "filter", "(", "Participant", ".", "uniqueid", "==", "unique_id", ")", ".", "one", "(", ")", "worker_dict", "[", "'bonus'", "]", "=", "worker", ".", "bonus", "except", "sa", ".", "exc", ".", "InvalidRequestError", ":", "# assignment is found on mturk but not in local database.", "worker_dict", "[", "'bonus'", "]", "=", "'N/A'", "return", "worker_dict" ]
calculate the Grüneisen parameter for constant q
def constq_grun(v, v0, gamma0, q):
    x = v / v0
    return gamma0 * np.power(x, q)
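Written out, this is the constant-q Grüneisen relation the code evaluates:

    \gamma(V) = \gamma_0 \left( \frac{V}{V_0} \right)^{q}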
7,014
https://github.com/SHDShim/pytheos/blob/be079624405e92fbec60c5ead253eb5917e55237/pytheos/eqn_therm_constq.py#L13-L24
[ "def", "parse_mimetype", "(", "cls", ",", "value", ")", ":", "match", "=", "re", ".", "match", "(", "r'([a-zA-Z0-9-]+/[a-zA-Z0-9-]+)'", ",", "value", ")", "if", "match", ":", "return", "match", ".", "group", "(", "1", ")" ]
calculate Debye temperature for constant q
def constq_debyetemp(v, v0, gamma0, q, theta0):
    gamma = constq_grun(v, v0, gamma0, q)
    if isuncertainties([v, v0, gamma0, q, theta0]):
        theta = theta0 * unp.exp((gamma0 - gamma) / q)
    else:
        theta = theta0 * np.exp((gamma0 - gamma) / q)
    return theta
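Integrating the thermodynamic definition \gamma = -\,d\ln\theta / d\ln V with the power-law \gamma(V) above gives the closed form the code uses:

    \theta(V) = \theta_0 \exp\!\left( \frac{\gamma_0 - \gamma(V)}{q} \right)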
7,015
https://github.com/SHDShim/pytheos/blob/be079624405e92fbec60c5ead253eb5917e55237/pytheos/eqn_therm_constq.py#L27-L43
[ "def", "format_modified", "(", "self", ",", "modified", ",", "sep", "=", "\" \"", ")", ":", "if", "modified", "is", "not", "None", ":", "return", "modified", ".", "strftime", "(", "\"%Y-%m-%d{0}%H:%M:%S.%fZ\"", ".", "format", "(", "sep", ")", ")", "return", "u\"\"" ]
calculate thermal pressure for constant q
def constq_pth(v, temp, v0, gamma0, q, theta0, n, z,
               t_ref=300., three_r=3. * constants.R):
    v_mol = vol_uc2mol(v, z)
    # x = v / v0
    gamma = constq_grun(v, v0, gamma0, q)
    theta = constq_debyetemp(v, v0, gamma0, q, theta0)
    xx = theta / temp
    debye = debye_E(xx)
    if t_ref == 0.:
        debye0 = 0.
    else:
        xx0 = theta / t_ref
        debye0 = debye_E(xx0)
    Eth0 = three_r * n * t_ref * debye0
    Eth = three_r * n * temp * debye
    delEth = Eth - Eth0
    p_th = (gamma / v_mol * delEth) * 1.e-9
    return p_th
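The routine implements the Mie-Grüneisen thermal pressure with a Debye thermal energy:

    P_{\rm th}(V, T) = \frac{\gamma(V)}{V_{\rm mol}} \left[ E_{\rm th}(T) - E_{\rm th}(T_{\rm ref}) \right],
    \qquad E_{\rm th}(T) = 3 n R T \, D\!\left( \frac{\theta(V)}{T} \right)

where D is the Debye function computed by debye_E; the trailing 1.e-9 factor is apparently a Pa-to-GPa conversion for SI molar volumes.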
7,016
https://github.com/SHDShim/pytheos/blob/be079624405e92fbec60c5ead253eb5917e55237/pytheos/eqn_therm_constq.py#L46-L78
[ "def", "ng_dissim", "(", "a", ",", "b", ",", "X", "=", "None", ",", "membship", "=", "None", ")", ":", "# Without membership, revert to matching dissimilarity", "if", "membship", "is", "None", ":", "return", "matching_dissim", "(", "a", ",", "b", ")", "def", "calc_cjr", "(", "b", ",", "X", ",", "memj", ",", "idr", ")", ":", "\"\"\"Num objects w/ category value x_{i,r} for rth attr in jth cluster\"\"\"", "xcids", "=", "np", ".", "where", "(", "memj", "==", "1", ")", "return", "float", "(", "(", "np", ".", "take", "(", "X", ",", "xcids", ",", "axis", "=", "0", ")", "[", "0", "]", "[", ":", ",", "idr", "]", "==", "b", "[", "idr", "]", ")", ".", "sum", "(", "0", ")", ")", "def", "calc_dissim", "(", "b", ",", "X", ",", "memj", ",", "idr", ")", ":", "# Size of jth cluster", "cj", "=", "float", "(", "np", ".", "sum", "(", "memj", ")", ")", "return", "(", "1.0", "-", "(", "calc_cjr", "(", "b", ",", "X", ",", "memj", ",", "idr", ")", "/", "cj", ")", ")", "if", "cj", "!=", "0.0", "else", "0.0", "if", "len", "(", "membship", ")", "!=", "a", ".", "shape", "[", "0", "]", "and", "len", "(", "membship", "[", "0", "]", ")", "!=", "X", ".", "shape", "[", "1", "]", ":", "raise", "ValueError", "(", "\"'membship' must be a rectangular array where \"", "\"the number of rows in 'membship' equals the \"", "\"number of rows in 'a' and the number of \"", "\"columns in 'membship' equals the number of rows in 'X'.\"", ")", "return", "np", ".", "array", "(", "[", "np", ".", "array", "(", "[", "calc_dissim", "(", "b", ",", "X", ",", "membship", "[", "idj", "]", ",", "idr", ")", "if", "b", "[", "idr", "]", "==", "t", "else", "1.0", "for", "idr", ",", "t", "in", "enumerate", "(", "val_a", ")", "]", ")", ".", "sum", "(", "0", ")", "for", "idj", ",", "val_a", "in", "enumerate", "(", "a", ")", "]", ")" ]
Retrieve valid urn-references for a text
def getValidReff(self, urn, inventory=None, level=None):
    return self.call({
        "inv": inventory,
        "urn": urn,
        "level": level,
        "request": "GetValidReff",
    })
7,017
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/retrievers/cts5.py#L75-L92
[ "def", "setDefaultColorRamp", "(", "self", ",", "colorRampEnum", "=", "ColorRampEnum", ".", "COLOR_RAMP_HUE", ")", ":", "self", ".", "_colorRamp", "=", "ColorRampGenerator", ".", "generateDefaultColorRamp", "(", "colorRampEnum", ")" ]
Retrieve a passage
def getPassage(self, urn, inventory=None, context=None):
    return self.call({
        "inv": inventory,
        "urn": urn,
        "context": context,
        "request": "GetPassage",
    })
7,018
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/retrievers/cts5.py#L139-L155
[ "def", "_set_virtual", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "in", "self", "and", "key", "not", "in", "self", ".", "_virtual_keys", ":", "return", "# Do nothing for non-virtual keys.", "self", ".", "_virtual_keys", ".", "add", "(", "key", ")", "if", "key", "in", "self", "and", "self", "[", "key", "]", "is", "not", "value", ":", "self", ".", "_on_change", "(", "key", ",", "value", ")", "dict", ".", "__setitem__", "(", "self", ",", "key", ",", "value", ")", "for", "overlay", "in", "self", ".", "_iter_overlays", "(", ")", ":", "overlay", ".", "_set_virtual", "(", "key", ",", "value", ")" ]
Retrieve a passage and information about it
def getPassagePlus(self, urn, inventory=None, context=None):
    return self.call({
        "inv": inventory,
        "urn": urn,
        "context": context,
        "request": "GetPassagePlus",
    })
7,019
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/retrievers/cts5.py#L157-L173
[ "def", "update_cluster", "(", "cluster_ref", ",", "cluster_spec", ")", ":", "cluster_name", "=", "get_managed_object_name", "(", "cluster_ref", ")", "log", ".", "trace", "(", "'Updating cluster \\'%s\\''", ",", "cluster_name", ")", "try", ":", "task", "=", "cluster_ref", ".", "ReconfigureComputeResource_Task", "(", "cluster_spec", ",", "modify", "=", "True", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "wait_for_task", "(", "task", ",", "cluster_name", ",", "'ClusterUpdateTask'", ")" ]
Parse a diff into an iterator of Intervals.
def parse_intervals(diff_report):
    for patch in diff_report.patch_set:
        try:
            old_pf = diff_report.old_file(patch.source_file)
            new_pf = diff_report.new_file(patch.target_file)
        except InvalidPythonFile:
            continue
        for hunk in patch:
            for line in hunk:
                if line.line_type == LINE_TYPE_ADDED:
                    idx = line.target_line_no
                    yield ContextInterval(new_pf.filename, new_pf.context(idx))
                elif line.line_type == LINE_TYPE_REMOVED:
                    idx = line.source_line_no
                    yield ContextInterval(old_pf.filename, old_pf.context(idx))
                elif line.line_type in (LINE_TYPE_EMPTY, LINE_TYPE_CONTEXT):
                    pass
                else:
                    raise AssertionError("Unexpected line type: %s" % line)
7,020
https://github.com/ChrisBeaumont/smother/blob/65d1ea6ae0060d213b0dcbb983c5aa8e7fee07bb/smother/diff.py#L31-L54
[ "def", "hacking_no_author_tags", "(", "physical_line", ")", ":", "for", "regex", "in", "AUTHOR_TAG_RE", ":", "if", "regex", ".", "match", "(", "physical_line", ")", ":", "physical_line", "=", "physical_line", ".", "lower", "(", ")", "pos", "=", "physical_line", ".", "find", "(", "'moduleauthor'", ")", "if", "pos", "<", "0", ":", "pos", "=", "physical_line", ".", "find", "(", "'author'", ")", "return", "(", "pos", ",", "\"H105: Don't use author tags\"", ")" ]
Choose a string literal that can wrap our string.
def string_literal(content):
    if '"' in content and "'" in content:
        # there is no way to escape string literal characters in XPath
        raise ValueError("Cannot represent this string in XPath")
    if '"' in content:
        # if it contains " wrap it in '
        content = "'%s'" % content
    else:
        # wrap it in "
        content = '"%s"' % content
    return content
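A quick sketch of the helper's behavior on the three possible cases:

print(string_literal('simple'))    # -> "simple"
print(string_literal('say "hi"'))  # -> 'say "hi"'
print(string_literal("it's"))      # -> "it's"
string_literal('has " and \'')     # raises ValueError: cannot be escaped in XPath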
7,021
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L32-L51
[ "def", "_ExtractMetadataFromFileEntry", "(", "self", ",", "mediator", ",", "file_entry", ",", "data_stream", ")", ":", "# Do not extract metadata from the root file entry when it is virtual.", "if", "file_entry", ".", "IsRoot", "(", ")", "and", "file_entry", ".", "type_indicator", "not", "in", "(", "self", ".", "_TYPES_WITH_ROOT_METADATA", ")", ":", "return", "# We always want to extract the file entry metadata but we only want", "# to parse it once per file entry, so we only use it if we are", "# processing the default data stream of regular files.", "if", "data_stream", "and", "not", "data_stream", ".", "IsDefault", "(", ")", ":", "return", "display_name", "=", "mediator", ".", "GetDisplayName", "(", ")", "logger", ".", "debug", "(", "'[ExtractMetadataFromFileEntry] processing file entry: {0:s}'", ".", "format", "(", "display_name", ")", ")", "self", ".", "processing_status", "=", "definitions", ".", "STATUS_INDICATOR_EXTRACTING", "if", "self", ".", "_processing_profiler", ":", "self", ".", "_processing_profiler", ".", "StartTiming", "(", "'extracting'", ")", "self", ".", "_event_extractor", ".", "ParseFileEntryMetadata", "(", "mediator", ",", "file_entry", ")", "if", "self", ".", "_processing_profiler", ":", "self", ".", "_processing_profiler", ".", "StopTiming", "(", "'extracting'", ")", "self", ".", "processing_status", "=", "definitions", ".", "STATUS_INDICATOR_RUNNING" ]
The ID of an element referenced by a label's `for` attribute. The label must be visible.
def element_id_by_label(browser, label):
    label = ElementSelector(
        browser,
        str('//label[contains(., %s)]' % string_literal(label)))
    if not label:
        return False
    return label.get_attribute('for')
7,022
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L224-L239
[ "def", "write_result_stream", "(", "result_stream", ",", "filename_prefix", "=", "None", ",", "results_per_file", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "result_stream", ",", "types", ".", "GeneratorType", ")", ":", "stream", "=", "result_stream", "else", ":", "stream", "=", "result_stream", ".", "stream", "(", ")", "file_time_formatter", "=", "\"%Y-%m-%dT%H_%M_%S\"", "if", "filename_prefix", "is", "None", ":", "filename_prefix", "=", "\"twitter_search_results\"", "if", "results_per_file", ":", "logger", ".", "info", "(", "\"chunking result stream to files with {} tweets per file\"", ".", "format", "(", "results_per_file", ")", ")", "chunked_stream", "=", "partition", "(", "stream", ",", "results_per_file", ",", "pad_none", "=", "True", ")", "for", "chunk", "in", "chunked_stream", ":", "chunk", "=", "filter", "(", "lambda", "x", ":", "x", "is", "not", "None", ",", "chunk", ")", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}_{}.json\"", ".", "format", "(", "filename_prefix", ",", "curr_datetime", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "chunk", ")", "else", ":", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}.json\"", ".", "format", "(", "filename_prefix", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "stream", ")" ]
Find a button with the given value.
def find_button(browser, value):
    field_types = (
        'submit',
        'reset',
        'button-element',
        'button',
        'image',
        'button-role',
    )
    return reduce(
        operator.add,
        (find_field_with_value(browser, field_type, value)
         for field_type in field_types))
7,023
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L280-L308
[ "def", "addNoiseToVector", "(", "inputVector", ",", "noiseLevel", ",", "vectorType", ")", ":", "if", "vectorType", "==", "'sparse'", ":", "corruptSparseVector", "(", "inputVector", ",", "noiseLevel", ")", "elif", "vectorType", "==", "'dense'", ":", "corruptDenseVector", "(", "inputVector", ",", "noiseLevel", ")", "else", ":", "raise", "ValueError", "(", "\"vectorType must be 'sparse' or 'dense' \"", ")" ]
Locate an input field.
def find_field(browser, field_type, value):
    return (find_field_by_id(browser, field_type, value) +
            find_field_by_name(browser, field_type, value) +
            find_field_by_label(browser, field_type, value))
7,024
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L331-L346
[ "def", "add_otp_style", "(", "self", ",", "zip_odp", ",", "style_file", ")", ":", "style", "=", "zipwrap", ".", "Zippier", "(", "style_file", ")", "for", "picture_file", "in", "style", ".", "ls", "(", "\"Pictures\"", ")", ":", "zip_odp", ".", "write", "(", "picture_file", ",", "style", ".", "cat", "(", "picture_file", ",", "True", ")", ")", "xml_data", "=", "style", ".", "cat", "(", "\"styles.xml\"", ",", "False", ")", "# import pdb;pdb.set_trace()", "xml_data", "=", "self", ".", "override_styles", "(", "xml_data", ")", "zip_odp", ".", "write", "(", "\"styles.xml\"", ",", "xml_data", ")" ]
Locate the control input with the given id.
def find_field_by_id(browser, field_type, id):
    return ElementSelector(
        browser,
        xpath=field_xpath(field_type, 'id') % string_literal(id),
        filter_displayed=True,
    )
7,025
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L369-L383
[ "def", "factory", "(", "fileobject", ",", "jfs", ",", "parentpath", ")", ":", "# fileobject from lxml.objectify", "if", "hasattr", "(", "fileobject", ",", "'currentRevision'", ")", ":", "# a normal file", "return", "JFSFile", "(", "fileobject", ",", "jfs", ",", "parentpath", ")", "elif", "str", "(", "fileobject", ".", "latestRevision", ".", "state", ")", "==", "ProtoFile", ".", "STATE_INCOMPLETE", ":", "return", "JFSIncompleteFile", "(", "fileobject", ",", "jfs", ",", "parentpath", ")", "elif", "str", "(", "fileobject", ".", "latestRevision", ".", "state", ")", "==", "ProtoFile", ".", "STATE_CORRUPT", ":", "return", "JFSCorruptFile", "(", "fileobject", ",", "jfs", ",", "parentpath", ")", "else", ":", "raise", "NotImplementedError", "(", "'No JFS*File support for state %r. Please file a bug!'", "%", "fileobject", ".", "latestRevision", ".", "state", ")" ]
Locate the control input with the given name.
def find_field_by_name(browser, field_type, name):
    return ElementSelector(
        browser,
        field_xpath(field_type, 'name') % string_literal(name),
        filter_displayed=True,
    )
7,026
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L386-L401
[ "def", "random_stochastic_matrix", "(", "n", ",", "k", "=", "None", ",", "sparse", "=", "False", ",", "format", "=", "'csr'", ",", "random_state", "=", "None", ")", ":", "P", "=", "_random_stochastic_matrix", "(", "m", "=", "n", ",", "n", "=", "n", ",", "k", "=", "k", ",", "sparse", "=", "sparse", ",", "format", "=", "format", ",", "random_state", "=", "random_state", ")", "return", "P" ]
Locate the control input with the given value. Useful for buttons.
def find_field_by_value(browser, field_type, name):
    xpath = field_xpath(field_type, 'value') % string_literal(name)
    elems = ElementSelector(
        browser,
        xpath=str(xpath),
        filter_displayed=True,
        filter_enabled=True,
    )
    # sort by shortest first (most closely matching)
    if field_type in ('button-element', 'button-role'):
        elems = sorted(elems, key=lambda elem: len(elem.text))
    else:
        elems = sorted(elems,
                       key=lambda elem: len(elem.get_attribute('value')))
    if elems:
        elems = [elems[0]]
    return ElementSelector(browser, elements=elems)
7,027
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L404-L432
[ "def", "remove_extra_presentations", "(", "self", ",", "resource", ",", "timeout", "=", "-", "1", ")", ":", "uri", "=", "self", ".", "URI", "+", "\"/repair\"", "custom_headers", "=", "{", "'Accept-Language'", ":", "'en_US'", "}", "return", "self", ".", "_client", ".", "create", "(", "resource", ",", "uri", "=", "uri", ",", "timeout", "=", "timeout", ",", "custom_headers", "=", "custom_headers", ")" ]
Locate the control input that has a label pointing to it.
def find_field_by_label(browser, field_type, label):
    return ElementSelector(
        browser,
        xpath=field_xpath(field_type, 'id') %
            '//label[contains(., {0})]/@for'.format(string_literal(label)),
        filter_displayed=True,
    )
7,028
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L435-L456
[ "def", "readme_verify", "(", ")", ":", "expected", "=", "populate_readme", "(", "REVISION", ",", "RTD_VERSION", ")", "# Actually get the stored contents.", "with", "open", "(", "README_FILE", ",", "\"r\"", ")", "as", "file_obj", ":", "contents", "=", "file_obj", ".", "read", "(", ")", "if", "contents", "!=", "expected", ":", "err_msg", "=", "\"\\n\"", "+", "get_diff", "(", "contents", ",", "expected", ",", "\"README.rst.actual\"", ",", "\"README.rst.expected\"", ")", "raise", "ValueError", "(", "err_msg", ")", "else", ":", "print", "(", "\"README contents are as expected.\"", ")" ]
A decorator to invoke a function, retrying on assertion errors for a specified time interval.
def wait_for(func):
    @wraps(func)
    def wrapped(*args, **kwargs):
        timeout = kwargs.pop('timeout', TIMEOUT)
        start = None
        while True:
            try:
                return func(*args, **kwargs)
            except AssertionError:
                # The function took some time to test the assertion, however,
                # the result might correspond to the state of the world at any
                # point in time, perhaps earlier than the timeout. Therefore,
                # start counting time from the first assertion fail, not from
                # before the function was called.
                if not start:
                    start = time()
                if time() - start < timeout:
                    sleep(CHECK_EVERY)
                    continue
                else:
                    raise
    return wrapped
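A minimal usage sketch (the browser object and the check itself are hypothetical; TIMEOUT and CHECK_EVERY are module-level settings):

@wait_for
def page_title_is(browser, expected):
    # retried until the assertion passes or the timeout elapses
    assert browser.title == expected

page_title_is(browser, "Dashboard", timeout=5)  # per-call timeout override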
7,029
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L481-L513
[ "def", "delete_everything", "(", "self", ")", ":", "for", "k", "in", "self", ".", "_backup_list", "(", "prefix", "=", "self", ".", "layout", ".", "basebackups", "(", ")", ")", ":", "self", ".", "_maybe_delete_key", "(", "k", ",", "'part of a base backup'", ")", "for", "k", "in", "self", ".", "_backup_list", "(", "prefix", "=", "self", ".", "layout", ".", "wal_directory", "(", ")", ")", ":", "self", ".", "_maybe_delete_key", "(", "k", ",", "'part of wal logs'", ")", "if", "self", ".", "deleter", ":", "self", ".", "deleter", ".", "close", "(", ")" ]
Filter elements by visibility and enabled status.
def filter(self, displayed=False, enabled=False):
    if self.evaluated:
        # Filter elements one by one
        result = self
        if displayed:
            result = ElementSelector(
                result.browser,
                elements=[e for e in result if e.is_displayed()])
        if enabled:
            result = ElementSelector(
                result.browser,
                elements=[e for e in result if e.is_enabled()])
    else:
        result = copy(self)
        if displayed:
            result.displayed = True
        if enabled:
            result.enabled = True
    return result
7,030
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L107-L140
[ "def", "_get_conversion_outfile", "(", "self", ",", "convert_to", "=", "None", ")", ":", "conversion", "=", "self", ".", "_get_conversion_type", "(", "convert_to", ")", "prefix", "=", "\"Singularity\"", "if", "conversion", "==", "\"docker\"", ":", "prefix", "=", "\"Dockerfile\"", "suffix", "=", "next", "(", "tempfile", ".", "_get_candidate_names", "(", ")", ")", "return", "\"%s.%s\"", "%", "(", "prefix", ",", "suffix", ")" ]
Fetch the elements from the browser.
def _select(self):
    for element in self.browser.find_elements_by_xpath(self.xpath):
        if self.filter_displayed:
            if not element.is_displayed():
                continue
        if self.filter_enabled:
            if not element.is_enabled():
                continue
        yield element
7,031
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/util.py#L142-L154
[ "def", "build_configuration_parameters", "(", "app", ")", ":", "env", "=", "Environment", "(", "loader", "=", "FileSystemLoader", "(", "\"{0}/_data_templates\"", ".", "format", "(", "BASEPATH", ")", ")", ")", "template_file", "=", "env", ".", "get_template", "(", "\"configuration-parameters.j2\"", ")", "data", "=", "{", "}", "data", "[", "\"schema\"", "]", "=", "Config", ".", "schema", "(", ")", "rendered_template", "=", "template_file", ".", "render", "(", "*", "*", "data", ")", "output_dir", "=", "\"{0}/configuration/generated\"", ".", "format", "(", "BASEPATH", ")", "with", "open", "(", "\"{}/parameters.rst\"", ".", "format", "(", "output_dir", ")", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "write", "(", "rendered_template", ")" ]
Authenticate and enable the registered notifications
def authenticate(self) -> bool:
    with IHCController._mutex:
        if not self.client.authenticate(self._username, self._password):
            return False
        if self._ihcevents:
            self.client.enable_runtime_notifications(self._ihcevents.keys())
        return True
7,032
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihccontroller.py#L31-L39
[ "def", "dump_compile", "(", "codeobject", ",", "filename", ",", "timestamp", ",", "magic", ")", ":", "# Atomically write the pyc/pyo file. Issue #13146.", "# id() is used to generate a pseudo-random filename.", "path_tmp", "=", "'%s.%s'", "%", "(", "filename", ",", "id", "(", "filename", ")", ")", "fc", "=", "None", "try", ":", "fc", "=", "open", "(", "path_tmp", ",", "'wb'", ")", "if", "PYTHON3", ":", "fc", ".", "write", "(", "bytes", "(", "[", "0", ",", "0", ",", "0", ",", "0", "]", ")", ")", "else", ":", "fc", ".", "write", "(", "'\\0\\0\\0\\0'", ")", "wr_long", "(", "fc", ",", "timestamp", ")", "marshal", ".", "dump", "(", "codeobject", ",", "fc", ")", "fc", ".", "flush", "(", ")", "fc", ".", "seek", "(", "0", ",", "0", ")", "fc", ".", "write", "(", "magic", ")", "fc", ".", "close", "(", ")", "os", ".", "rename", "(", "path_tmp", ",", "filename", ")", "except", "OSError", ":", "try", ":", "os", ".", "unlink", "(", "path_tmp", ")", "except", "OSError", ":", "pass", "raise", "finally", ":", "if", "fc", ":", "fc", ".", "close", "(", ")" ]
Get runtime value, re-authenticating if needed
def get_runtime_value(self, ihcid: int):
    if self.client.get_runtime_value(ihcid):
        return True
    self.re_authenticate()
    return self.client.get_runtime_value(ihcid)
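The same try-reauthenticate-retry shape repeats in the setters below; a generic sketch of the pattern (the _with_reauth helper is hypothetical, not part of the SDK):

def _with_reauth(self, call, *args):
    # hypothetical helper: try once, re-authenticate on failure,
    # then try exactly one more time
    if call(*args):
        return True
    self.re_authenticate()
    return call(*args)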
7,033
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihccontroller.py#L47-L52
[ "def", "update", "(", "self", ",", "collection_id", ",", "title", "=", "None", ",", "description", "=", "None", ",", "private", "=", "False", ")", ":", "url", "=", "\"/collections/%s\"", "%", "collection_id", "data", "=", "{", "\"title\"", ":", "title", ",", "\"description\"", ":", "description", ",", "\"private\"", ":", "private", "}", "result", "=", "self", ".", "_put", "(", "url", ",", "data", "=", "data", ")", "return", "CollectionModel", ".", "parse", "(", "result", ")" ]
Set bool runtime value, re-authenticating if needed
def set_runtime_value_bool(self, ihcid: int, value: bool) -> bool:
    if self.client.set_runtime_value_bool(ihcid, value):
        return True
    self.re_authenticate()
    return self.client.set_runtime_value_bool(ihcid, value)
7,034
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihccontroller.py#L54-L59
[ "def", "files_comments_delete", "(", "self", ",", "*", ",", "file", ":", "str", ",", "id", ":", "str", ",", "*", "*", "kwargs", ")", "->", "SlackResponse", ":", "kwargs", ".", "update", "(", "{", "\"file\"", ":", "file", ",", "\"id\"", ":", "id", "}", ")", "return", "self", ".", "api_call", "(", "\"files.comments.delete\"", ",", "json", "=", "kwargs", ")" ]
Set integer runtime value, re-authenticating if needed
def set_runtime_value_int(self, ihcid: int, value: int) -> bool:
    if self.client.set_runtime_value_int(ihcid, value):
        return True
    self.re_authenticate()
    return self.client.set_runtime_value_int(ihcid, value)
7,035
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihccontroller.py#L61-L66
[ "def", "files_comments_delete", "(", "self", ",", "*", ",", "file", ":", "str", ",", "id", ":", "str", ",", "*", "*", "kwargs", ")", "->", "SlackResponse", ":", "kwargs", ".", "update", "(", "{", "\"file\"", ":", "file", ",", "\"id\"", ":", "id", "}", ")", "return", "self", ".", "api_call", "(", "\"files.comments.delete\"", ",", "json", "=", "kwargs", ")" ]
Set float runtime value, re-authenticating if needed
def set_runtime_value_float(self, ihcid: int, value: float) -> bool:
    if self.client.set_runtime_value_float(ihcid, value):
        return True
    self.re_authenticate()
    return self.client.set_runtime_value_float(ihcid, value)
7,036
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihccontroller.py#L68-L73
[ "def", "files_comments_delete", "(", "self", ",", "*", ",", "file", ":", "str", ",", "id", ":", "str", ",", "*", "*", "kwargs", ")", "->", "SlackResponse", ":", "kwargs", ".", "update", "(", "{", "\"file\"", ":", "file", ",", "\"id\"", ":", "id", "}", ")", "return", "self", ".", "api_call", "(", "\"files.comments.delete\"", ",", "json", "=", "kwargs", ")" ]
Get the ihc project, making sure the controller is ready first
def get_project(self) -> str:
    with IHCController._mutex:
        if self._project is None:
            if self.client.get_state() != IHCSTATE_READY:
                ready = self.client.wait_for_state_change(IHCSTATE_READY, 10)
                if ready != IHCSTATE_READY:
                    return None
            self._project = self.client.get_project()
        return self._project
7,037
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihccontroller.py#L75-L85
[ "def", "both_args", "(", "self", ")", ":", "turtle", "=", "self", ".", "STACKARG_SP_BUFF", "+", "self", ".", "STACKARG_SP_DIFF", "while", "True", ":", "yield", "SimStackArg", "(", "turtle", ",", "self", ".", "arch", ".", "bytes", ")", "turtle", "+=", "self", ".", "arch", ".", "bytes" ]
Add a notify callback for a specified resource id. If delayed is set to true, the enable request will be sent from the notification thread.
def add_notify_event(self, resourceid: int, callback, delayed=False):
    with IHCController._mutex:
        if resourceid in self._ihcevents:
            self._ihcevents[resourceid].append(callback)
        else:
            self._ihcevents[resourceid] = [callback]
            if delayed:
                self._newnotifyids.append(resourceid)
            else:
                if not self.client.enable_runtime_notification(resourceid):
                    return False
        if not self._notifyrunning:
            self._notifythread.start()
        return True
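A hedged usage sketch (the controller instance and resource id are hypothetical):

def on_change(ihcid, value):
    print("resource {} changed to {}".format(ihcid, value))

controller.add_notify_event(12345, on_change)  # first call also starts the notify thread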
7,038
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihccontroller.py#L87-L105
[ "def", "update_custom_field_options", "(", "self", ",", "custom_field_key", ",", "new_options", ",", "keep_existing_options", ")", ":", "custom_field_key", "=", "quote", "(", "custom_field_key", ",", "''", ")", "body", "=", "{", "\"Options\"", ":", "new_options", ",", "\"KeepExistingOptions\"", ":", "keep_existing_options", "}", "response", "=", "self", ".", "_put", "(", "self", ".", "uri_for", "(", "\"customfields/%s/options\"", "%", "custom_field_key", ")", ",", "json", ".", "dumps", "(", "body", ")", ")" ]
The notify thread function.
def _notify_fn(self):
    self._notifyrunning = True
    while self._notifyrunning:
        try:
            with IHCController._mutex:
                # Are there any new ids to be added?
                if self._newnotifyids:
                    self.client.enable_runtime_notifications(
                        self._newnotifyids)
                    self._newnotifyids = []
            changes = self.client.wait_for_resource_value_changes()
            if changes is False:
                self.re_authenticate(True)
                continue
            for ihcid in changes:
                value = changes[ihcid]
                if ihcid in self._ihcevents:
                    for callback in self._ihcevents[ihcid]:
                        callback(ihcid, value)
        except Exception as exp:
            self.re_authenticate(True)
7,039
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihccontroller.py#L107-L129
[ "def", "_get_port_speed_price_id", "(", "items", ",", "port_speed", ",", "no_public", ",", "location", ")", ":", "for", "item", "in", "items", ":", "if", "utils", ".", "lookup", "(", "item", ",", "'itemCategory'", ",", "'categoryCode'", ")", "!=", "'port_speed'", ":", "continue", "# Check for correct capacity and if the item matches private only", "if", "any", "(", "[", "int", "(", "utils", ".", "lookup", "(", "item", ",", "'capacity'", ")", ")", "!=", "port_speed", ",", "_is_private_port_speed_item", "(", "item", ")", "!=", "no_public", ",", "not", "_is_bonded", "(", "item", ")", "]", ")", ":", "continue", "for", "price", "in", "item", "[", "'prices'", "]", ":", "if", "not", "_matches_location", "(", "price", ",", "location", ")", ":", "continue", "return", "price", "[", "'id'", "]", "raise", "SoftLayer", ".", "SoftLayerError", "(", "\"Could not find valid price for port speed: '%s'\"", "%", "port_speed", ")" ]
Authenticate again after failure. Keep trying at a 10 sec interval. If called from the notify thread we will not have a timeout, but will end if the notify thread has been cancelled. Will return True if authentication was successful.
def re_authenticate(self, notify: bool = False) -> bool:
    timeout = datetime.now() + timedelta(seconds=self.reauthenticatetimeout)
    while True:
        if self.authenticate():
            return True
        if notify:
            if not self._notifyrunning:
                return False
        else:
            if timeout and datetime.now() > timeout:
                return False
        # wait before we try to authenticate again
        time.sleep(self.retryinterval)
7,040
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihccontroller.py#L131-L151
[ "def", "SetSerializersProfiler", "(", "self", ",", "serializers_profiler", ")", ":", "self", ".", "_serializers_profiler", "=", "serializers_profiler", "if", "self", ".", "_storage_file", ":", "self", ".", "_storage_file", ".", "SetSerializersProfiler", "(", "serializers_profiler", ")" ]
Iterate over the lines of The Complete Works of William Shakespeare.
def get_texts(self, metadata=None):
    if metadata is None:
        metadata = self.metadata
    self.input_file = gzip.GzipFile(self.input_file_path)
    volume_num = 0
    with self.input_file as lines:
        for lineno, line in enumerate(lines):
            if volume_num >= len(self.book_meta['volumes']):
                # returning ends the generator; `raise StopIteration()`
                # is an error inside generators since PEP 479
                return
            if lineno < self.book_meta['volumes'][volume_num]['start']:
                continue
            if lineno < self.book_meta['volumes'][volume_num]['stop']:
                # act_num, scene_num = 0, 0
                # FIXME: use self.book_meta['volumes'][volume_num]['sections']
                if metadata:
                    # FIXME: use self.lemmatize
                    toks = self.tokenize(line, lowercase=self.lowercase)
                    yield (toks, (lineno,))
                else:
                    toks = self.tokenize(line, lowercase=self.lowercase)
                    yield toks
            else:
                volume_num += 1
7,041
https://github.com/totalgood/twip/blob/5c0411d2acfbe5b421841072814c9152591c03f7/docs/notebooks/shakescorpus.py#L169-L207
[ "def", "create_index", "(", "self", ",", "collection", ",", "index_name", ",", "*", "*", "kwargs", ")", ":", "try", ":", "self", ".", "connection", "[", "collection", "]", ".", "create_index", "(", "index_name", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "exc", ":", "LOG", ".", "warn", "(", "\"Error tuning mongodb database: %s\"", ",", "exc", ")" ]
If you try to encode each element individually with Python, this would take days!
def encode(df, encoding='utf8', verbosity=1):
    if verbosity > 0:
        # pbar_i = 0
        pbar = progressbar.ProgressBar(maxval=df.shape[1])
        pbar.start()
    # encode strings as UTF-8 so they'll work in python2 and python3
    for colnum, col in enumerate(df.columns):
        if isinstance(df[col], pd.Series):
            if verbosity:
                pbar.update(colnum)
            if (df[col].dtype in (np.dtype('object'), np.dtype('U'), np.dtype('S'))
                    and any(isinstance(obj, basestring) for obj in df[col])):
                strmask = np.array([isinstance(obj, basestring) for obj in df[col]])
                series = df[col].copy()
                try:
                    series[strmask] = np.char.encode(series[strmask].values.astype('U'))
                except TypeError:
                    print("Unable to convert {} elements starting at position {} in column {}".format(
                        sum(strmask), [i for i, b in enumerate(strmask) if b][:1], col))
                    raise
                except (UnicodeDecodeError, UnicodeEncodeError):
                    try:
                        series[strmask] = np.array([eval(s, {}, {}) for s in series[strmask]])
                    # FIXME: do something different for unicode and decode errors
                    except (SyntaxError, UnicodeDecodeError, UnicodeEncodeError):
                        newseries = []
                        for s in series[strmask]:
                            try:
                                newseries += [s.encode('utf8')]
                            except:
                                print(u'Had trouble encoding {} so used repr to turn it into {}'.format(
                                    s, repr(transcode_unicode(s))))
                                # strip all unicode chars and convert to ASCII str
                                newseries += [transcode_unicode(s)]
                        # for dtype('U'): UnicodeDecodeError: 'ascii' codec can't decode byte 0xe2 in position 207
                        series[strmask] = np.array(newseries).astype('O')
                df[col] = series
                # df[col] = np.array([x.encode('utf8') if isinstance(x, unicode) else x for x in df[col]])
                # WARNING: this takes DAYS for only 100k tweets!
                # series = df[col].copy()
                # for i, value in series.iteritems():
                #     if isinstance(value, basestring):
                #         series[i] = str(value.encode(encoding))
                # df[col] = series
    if verbosity:
        pbar.finish()
    return df
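The fast path hinges on numpy's vectorized encoder rather than a Python-level loop; a minimal isolated sketch of that trick:

import numpy as np

arr = np.array(['café', 'naïve', 'ascii'], dtype='U')
print(np.char.encode(arr, 'utf-8'))  # encodes the whole array in one call
# [b'caf\xc3\xa9' b'na\xc3\xafve' b'ascii']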
7,042
https://github.com/totalgood/twip/blob/5c0411d2acfbe5b421841072814c9152591c03f7/twip/scripts/clean.py#L176-L223
[ "def", "get_relationship_form_for_update", "(", "self", ",", "relationship_id", "=", "None", ")", ":", "if", "relationship_id", "is", "None", ":", "raise", "NullArgument", "(", ")", "try", ":", "url_path", "=", "(", "'/handcar/services/relationship/families/'", "+", "self", ".", "_catalog_idstr", "+", "'/relationships/'", "+", "str", "(", "relationship_id", ")", ")", "relationship", "=", "objects", ".", "Relationship", "(", "self", ".", "_get_request", "(", "url_path", ")", ")", "except", "Exception", ":", "raise", "relationship_form", "=", "objects", ".", "RelationshipForm", "(", "relationship", ".", "_my_map", ")", "self", ".", "_forms", "[", "relationship_form", ".", "get_id", "(", ")", ".", "get_identifier", "(", ")", "]", "=", "not", "UPDATED", "return", "relationship_form" ]
Load all_tweets.csv and run normalize, dropna, and encode before dumping to cleaned_tweets.csv.gz
def run(verbosity=1):
    filepath = os.path.join(DATA_PATH, 'all_tweets.csv')
    # this should load 100k tweets in about a minute
    # check the file size and estimate load time from that (see scripts/cat_tweets.py)
    print('Loading tweets from {} (could take a minute or so)...'.format(filepath))
    df = pd.read_csv(filepath, encoding='utf-8', engine='python')
    if 'id' in df.columns:
        df = df.set_index('id')
    df = normalize(df)
    df = dropna(df)
    df = encode(df, verbosity=verbosity)
    df = clean_labels(df)
    df.to_csv(os.path.join(DATA_PATH, 'cleaned_tweets.csv.gz'),
              compression='gzip', quotechar='"',
              quoting=pd.io.common.csv.QUOTE_NONNUMERIC)
    # the round-trip to disk cleans up encoding issues so encoding
    # no longer needs to be specified on load
    df = pd.read_csv(os.path.join(DATA_PATH, 'cleaned_tweets.csv.gz'),
                     index_col='id', compression='gzip', quotechar='"',
                     quoting=pd.io.common.csv.QUOTE_NONNUMERIC, low_memory=False)
    df.to_csv(os.path.join(DATA_PATH, 'cleaned_tweets.csv.gz'),
              compression='gzip', quotechar='"',
              quoting=pd.io.common.csv.QUOTE_NONNUMERIC)
    return df
7,043
https://github.com/totalgood/twip/blob/5c0411d2acfbe5b421841072814c9152591c03f7/twip/scripts/clean.py#L225-L252
[ "def", "_create_barrier_entities", "(", "root_pipeline_key", ",", "child_pipeline_key", ",", "purpose", ",", "blocking_slot_keys", ")", ":", "result", "=", "[", "]", "blocking_slot_keys", "=", "list", "(", "blocking_slot_keys", ")", "barrier", "=", "_BarrierRecord", "(", "parent", "=", "child_pipeline_key", ",", "key_name", "=", "purpose", ",", "target", "=", "child_pipeline_key", ",", "root_pipeline", "=", "root_pipeline_key", ",", "blocking_slots", "=", "blocking_slot_keys", ")", "result", ".", "append", "(", "barrier", ")", "for", "slot_key", "in", "blocking_slot_keys", ":", "barrier_index_path", "=", "[", "]", "barrier_index_path", ".", "extend", "(", "slot_key", ".", "to_path", "(", ")", ")", "barrier_index_path", ".", "extend", "(", "child_pipeline_key", ".", "to_path", "(", ")", ")", "barrier_index_path", ".", "extend", "(", "[", "_BarrierIndex", ".", "kind", "(", ")", ",", "purpose", "]", ")", "barrier_index_key", "=", "db", ".", "Key", ".", "from_path", "(", "*", "barrier_index_path", ")", "barrier_index", "=", "_BarrierIndex", "(", "key", "=", "barrier_index_key", ",", "root_pipeline", "=", "root_pipeline_key", ")", "result", ".", "append", "(", "barrier_index", ")", "return", "result" ]
Function to be spawned concurrently: consume data keys from the input queue and push the resulting dataframes to the output map
def data_worker(**kwargs):
    if kwargs is not None:
        if "function" in kwargs:
            function = kwargs["function"]
        else:
            raise Exception("Invalid arguments, no function specified")
        if "input" in kwargs:
            input_queue = kwargs["input"]
        else:
            raise Exception("Invalid Arguments, no input queue")
        if "output" in kwargs:
            output_map = kwargs["output"]
        else:
            raise Exception("Invalid Arguments, no output map")
        if "token" in kwargs:
            argsdict = {"quandl_token": kwargs["token"]}
        elif "Quandl" in function.__module__:
            raise Exception("Invalid Arguments, no Quandl token")
        if "source" in kwargs and "begin" in kwargs and "end" in kwargs:
            argsdict = {"data_source": kwargs["source"],
                        "begin": kwargs["begin"],
                        "end": kwargs["end"]}
        elif "source" in kwargs:
            argsdict = {"data_source": kwargs["source"]}
        elif "pandas.io.data" in function.__module__:
            raise Exception("Invalid Arguments, no pandas data source specified")
    else:
        raise Exception("Invalid Arguments")
    retries = 5
    while not input_queue.empty():
        data_key = input_queue.get()
        get_data(function, data_key, output_map, retries, argsdict)
7,044
https://github.com/briwilcox/Concurrent-Pandas/blob/22cb392dacb712e1bdb5b60c6ba7015c38445c99/concurrentpandas.py#L35-L75
[ "def", "verify", "(", "self", ")", ":", "# remember file pointer", "pos", "=", "self", ".", "tell", "(", ")", "self", ".", "seek", "(", "0", ")", "checksum", "=", "0", "for", "chunk", "in", "iter", "(", "lambda", ":", "self", ".", "read", "(", "1024", ")", ",", "b''", ")", ":", "checksum", "=", "crc32", "(", "chunk", ",", "checksum", ")", "# restore file pointer", "self", ".", "seek", "(", "pos", ")", "return", "self", ".", "crc32", "==", "checksum", "&", "0xffffffff" ]
Work through the keys to look up sequentially
def consume_keys(self):
    print("\nLooking up " + self.input_queue.qsize().__str__() +
          " keys from " + self.source_name + "\n")
    self.data_worker(**self.worker_args)
7,045
https://github.com/briwilcox/Concurrent-Pandas/blob/22cb392dacb712e1bdb5b60c6ba7015c38445c99/concurrentpandas.py#L145-L150
[ "def", "check_origin", "(", "self", ",", "origin", ")", ":", "mod_opts", "=", "self", ".", "application", ".", "mod_opts", "if", "mod_opts", ".", "get", "(", "'cors_origin'", ")", ":", "return", "bool", "(", "_check_cors_origin", "(", "origin", ",", "mod_opts", "[", "'cors_origin'", "]", ")", ")", "else", ":", "return", "super", "(", "AllEventsHandler", ",", "self", ")", ".", "check_origin", "(", "origin", ")" ]
Work through the keys to look up asynchronously using multiple processes
def consume_keys_asynchronous_processes ( self ) : print ( "\nLooking up " + self . input_queue . qsize ( ) . __str__ ( ) + " keys from " + self . source_name + "\n" ) jobs = multiprocessing . cpu_count ( ) * 4 if ( multiprocessing . cpu_count ( ) * 4 < self . input_queue . qsize ( ) ) else self . input_queue . qsize ( ) pool = multiprocessing . Pool ( processes = jobs , maxtasksperchild = 10 ) for x in range ( jobs ) : pool . apply ( self . data_worker , [ ] , self . worker_args ) pool . close ( ) pool . join ( )
7,046
https://github.com/briwilcox/Concurrent-Pandas/blob/22cb392dacb712e1bdb5b60c6ba7015c38445c99/concurrentpandas.py#L152-L165
[ "def", "login_required", "(", "func", ",", "permission", "=", "None", ")", ":", "@", "wraps", "(", "func", ")", "def", "decorated_function", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "check_token", "(", ")", ":", "return", "login", "(", ")", "elif", "not", "nago", ".", "core", ".", "has_access", "(", "session", ".", "get", "(", "'token'", ")", ")", ":", "return", "http403", "(", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "decorated_function" ]
Work through the keys to look up asynchronously using multiple threads
def consume_keys_asynchronous_threads ( self ) : print ( "\nLooking up " + self . input_queue . qsize ( ) . __str__ ( ) + " keys from " + self . source_name + "\n" ) jobs = multiprocessing . cpu_count ( ) * 4 if ( multiprocessing . cpu_count ( ) * 4 < self . input_queue . qsize ( ) ) else self . input_queue . qsize ( ) pool = ThreadPool ( jobs ) for x in range ( jobs ) : pool . apply ( self . data_worker , [ ] , self . worker_args ) pool . close ( ) pool . join ( )
7,047
https://github.com/briwilcox/Concurrent-Pandas/blob/22cb392dacb712e1bdb5b60c6ba7015c38445c99/concurrentpandas.py#L167-L181
[ "def", "communityvisibilitystate", "(", "self", ")", ":", "if", "self", ".", "_communityvisibilitystate", "==", "None", ":", "return", "None", "elif", "self", ".", "_communityvisibilitystate", "in", "self", ".", "VisibilityState", ":", "return", "self", ".", "VisibilityState", "[", "self", ".", "_communityvisibilitystate", "]", "else", ":", "#Invalid State", "return", "None" ]
Unpack is a recursive function that will unpack anything that inherits from the abstract base class Container, provided it does not also inherit from the Python basestring type.
def unpack ( self , to_unpack ) : # Python 3 lacks basestring type, work around below try : isinstance ( to_unpack , basestring ) except NameError : basestring = str # Base Case if isinstance ( to_unpack , basestring ) : self . input_queue . put ( to_unpack ) return for possible_key in to_unpack : if isinstance ( possible_key , basestring ) : self . input_queue . put ( possible_key ) elif sys . version_info >= ( 3 , 0 ) : if isinstance ( possible_key , collections . abc . Container ) and not isinstance ( possible_key , basestring ) : self . unpack ( possible_key ) else : raise Exception ( "A type that is neither a string or a container was passed to unpack. " "Aborting!" ) else : if isinstance ( possible_key , collections . Container ) and not isinstance ( possible_key , basestring ) : self . unpack ( possible_key ) else : raise Exception ( "A type that is neither a string or a container was passed to unpack. " "Aborting!" )
7,048
https://github.com/briwilcox/Concurrent-Pandas/blob/22cb392dacb712e1bdb5b60c6ba7015c38445c99/concurrentpandas.py#L202-L239
[ "def", "acquire_writer", "(", "self", ")", ":", "with", "self", ".", "mutex", ":", "while", "self", ".", "rwlock", "!=", "0", ":", "self", ".", "_writer_wait", "(", ")", "self", ".", "rwlock", "=", "-", "1" ]
Set data source to Quandl
def set_source_quandl ( self , quandl_token ) : self . data_worker = data_worker self . worker_args = { "function" : Quandl . get , "input" : self . input_queue , "output" : self . output_map , "token" : quandl_token } self . source_name = "Quandl"
7,049
https://github.com/briwilcox/Concurrent-Pandas/blob/22cb392dacb712e1bdb5b60c6ba7015c38445c99/concurrentpandas.py#L241-L248
[ "def", "delete_network_acl", "(", "network_acl_id", "=", "None", ",", "network_acl_name", "=", "None", ",", "disassociate", "=", "False", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "if", "disassociate", ":", "network_acl", "=", "_get_resource", "(", "'network_acl'", ",", "name", "=", "network_acl_name", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "network_acl", "and", "network_acl", ".", "associations", ":", "subnet_id", "=", "network_acl", ".", "associations", "[", "0", "]", ".", "subnet_id", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "conn", ".", "disassociate_network_acl", "(", "subnet_id", ")", "except", "BotoServerError", ":", "pass", "return", "_delete_resource", "(", "resource", "=", "'network_acl'", ",", "name", "=", "network_acl_name", ",", "resource_id", "=", "network_acl_id", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")" ]
Set data source to Google Finance
def set_source_google_finance ( self ) : self . data_worker = data_worker self . worker_args = { "function" : pandas . io . data . DataReader , "input" : self . input_queue , "output" : self . output_map , "source" : 'google' } self . source_name = "Google Finance"
7,050
https://github.com/briwilcox/Concurrent-Pandas/blob/22cb392dacb712e1bdb5b60c6ba7015c38445c99/concurrentpandas.py#L259-L266
[ "def", "delete", "(", "self", ",", "msg", ",", "claim_id", "=", "None", ")", ":", "msg_id", "=", "utils", ".", "get_id", "(", "msg", ")", "if", "claim_id", ":", "uri", "=", "\"/%s/%s?claim_id=%s\"", "%", "(", "self", ".", "uri_base", ",", "msg_id", ",", "claim_id", ")", "else", ":", "uri", "=", "\"/%s/%s\"", "%", "(", "self", ".", "uri_base", ",", "msg_id", ")", "return", "self", ".", "_delete", "(", "uri", ")" ]
Set data source to Yahoo Finance, specifically to download financial options data
def set_source_yahoo_options ( self ) : self . data_worker = data_worker self . worker_args = { "function" : Options , "input" : self . input_queue , "output" : self . output_map , "source" : 'yahoo' } self . source_name = "Yahoo Finance Options"
7,051
https://github.com/briwilcox/Concurrent-Pandas/blob/22cb392dacb712e1bdb5b60c6ba7015c38445c99/concurrentpandas.py#L277-L284
[ "def", "validate_packing", "(", "self", ")", ":", "surface", "=", "Rectangle", "(", "0", ",", "0", ",", "self", ".", "width", ",", "self", ".", "height", ")", "for", "r", "in", "self", ":", "if", "not", "surface", ".", "contains", "(", "r", ")", ":", "raise", "Exception", "(", "\"Rectangle placed outside surface\"", ")", "rectangles", "=", "[", "r", "for", "r", "in", "self", "]", "if", "len", "(", "rectangles", ")", "<=", "1", ":", "return", "for", "r1", "in", "range", "(", "0", ",", "len", "(", "rectangles", ")", "-", "2", ")", ":", "for", "r2", "in", "range", "(", "r1", "+", "1", ",", "len", "(", "rectangles", ")", "-", "1", ")", ":", "if", "rectangles", "[", "r1", "]", ".", "intersects", "(", "rectangles", "[", "r2", "]", ")", ":", "raise", "Exception", "(", "\"Rectangle collision detected\"", ")" ]
A decorator to ensure a function is run with jQuery available .
def load_jquery ( func ) : @ wraps ( func ) def wrapped ( browser , * args , * * kwargs ) : """Run the function, loading jQuery if needed.""" try : return func ( browser , * args , * * kwargs ) except WebDriverException as ex : if not is_jquery_not_defined_error ( ex . msg ) : raise load_script ( browser , JQUERY ) @ wait_for def jquery_available ( ) : """Assert that jQuery has loaded.""" try : return browser . execute_script ( 'return $' ) except WebDriverException : raise AssertionError ( "jQuery is not loaded" ) jquery_available ( ) return func ( browser , * args , * * kwargs ) return wrapped
7,052
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L64-L98
[ "def", "delete_classifier", "(", "self", ",", "classifier_id", ",", "*", "*", "kwargs", ")", ":", "if", "classifier_id", "is", "None", ":", "raise", "ValueError", "(", "'classifier_id must be provided'", ")", "headers", "=", "{", "}", "if", "'headers'", "in", "kwargs", ":", "headers", ".", "update", "(", "kwargs", ".", "get", "(", "'headers'", ")", ")", "sdk_headers", "=", "get_sdk_headers", "(", "'watson_vision_combined'", ",", "'V3'", ",", "'delete_classifier'", ")", "headers", ".", "update", "(", "sdk_headers", ")", "params", "=", "{", "'version'", ":", "self", ".", "version", "}", "url", "=", "'/v3/classifiers/{0}'", ".", "format", "(", "*", "self", ".", "_encode_path_vars", "(", "classifier_id", ")", ")", "response", "=", "self", ".", "request", "(", "method", "=", "'DELETE'", ",", "url", "=", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "accept_json", "=", "True", ")", "return", "response" ]
Assert an element exists matching the given selector .
def check_element_by_selector ( self , selector ) : elems = find_elements_by_jquery ( world . browser , selector ) if not elems : raise AssertionError ( "Expected matching elements, none found." )
7,053
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L133-L137
[ "def", "detail_dict", "(", "self", ")", ":", "d", "=", "self", ".", "dict", "def", "aug_col", "(", "c", ")", ":", "d", "=", "c", ".", "dict", "d", "[", "'stats'", "]", "=", "[", "s", ".", "dict", "for", "s", "in", "c", ".", "stats", "]", "return", "d", "d", "[", "'table'", "]", "=", "self", ".", "table", ".", "dict", "d", "[", "'table'", "]", "[", "'columns'", "]", "=", "[", "aug_col", "(", "c", ")", "for", "c", "in", "self", ".", "table", ".", "columns", "]", "return", "d" ]
Assert an element does not exist matching the given selector .
def check_no_element_by_selector ( self , selector ) : elems = find_elements_by_jquery ( world . browser , selector ) if elems : raise AssertionError ( "Expected no matching elements, found {}." . format ( len ( elems ) ) )
7,054
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L142-L147
[ "def", "_init_project_service", "(", "self", ",", "version", ")", ":", "project_cfg", "=", "self", ".", "_load_config_section", "(", "CONFIG_PROJECT_SECTION", ")", "self", ".", "_token_project", "=", "project_cfg", "[", "CONFIG_TOKEN", "]", "proto", "=", "project_cfg", "[", "CONFIG_PROTOCOL", "]", "host", "=", "project_cfg", "[", "CONFIG_HOST", "]", "self", ".", "_project", "=", "ProjectService", "(", "host", ",", "version", ")", "self", ".", "_project", ".", "base_protocol", "=", "proto", "self", ".", "_project", ".", "set_auth", "(", "self", ".", "_token_project", ")" ]
Assert an element exists matching the given selector within the given time period .
def wait_for_element_by_selector ( self , selector , seconds ) : def assert_element_present ( ) : """Assert an element matching the given selector exists.""" if not find_elements_by_jquery ( world . browser , selector ) : raise AssertionError ( "Expected a matching element." ) wait_for ( assert_element_present ) ( timeout = int ( seconds ) )
7,055
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L152-L163
[ "def", "imdecode", "(", "str_img", ",", "flag", "=", "1", ")", ":", "hdl", "=", "NDArrayHandle", "(", ")", "check_call", "(", "_LIB", ".", "MXCVImdecode", "(", "ctypes", ".", "c_char_p", "(", "str_img", ")", ",", "mx_uint", "(", "len", "(", "str_img", ")", ")", ",", "flag", ",", "ctypes", ".", "byref", "(", "hdl", ")", ")", ")", "return", "mx", ".", "nd", ".", "NDArray", "(", "hdl", ")" ]
Assert n elements exist matching the given selector .
def count_elements_exactly_by_selector ( self , number , selector ) : elems = find_elements_by_jquery ( world . browser , selector ) number = int ( number ) if len ( elems ) != number : raise AssertionError ( "Expected {} elements, found {}" . format ( number , len ( elems ) ) )
7,056
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L168-L176
[ "async", "def", "update_lease_async", "(", "self", ",", "lease", ")", ":", "if", "lease", "is", "None", ":", "return", "False", "if", "not", "lease", ".", "token", ":", "return", "False", "_logger", ".", "debug", "(", "\"Updating lease %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ")", "# First, renew the lease to make sure the update will go through.", "if", "await", "self", ".", "renew_lease_async", "(", "lease", ")", ":", "try", ":", "await", "self", ".", "host", ".", "loop", ".", "run_in_executor", "(", "self", ".", "executor", ",", "functools", ".", "partial", "(", "self", ".", "storage_client", ".", "create_blob_from_text", ",", "self", ".", "lease_container_name", ",", "lease", ".", "partition_id", ",", "json", ".", "dumps", "(", "lease", ".", "serializable", "(", ")", ")", ",", "lease_id", "=", "lease", ".", "token", ")", ")", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "_logger", ".", "error", "(", "\"Failed to update lease %r %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ",", "err", ")", "raise", "err", "else", ":", "return", "False", "return", "True" ]
Fill in the form element matching the CSS selector .
def fill_in_by_selector ( self , selector , value ) : elem = find_element_by_jquery ( world . browser , selector ) elem . clear ( ) elem . send_keys ( value )
7,057
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L181-L185
[ "def", "detail_dict", "(", "self", ")", ":", "d", "=", "self", ".", "dict", "def", "aug_col", "(", "c", ")", ":", "d", "=", "c", ".", "dict", "d", "[", "'stats'", "]", "=", "[", "s", ".", "dict", "for", "s", "in", "c", ".", "stats", "]", "return", "d", "d", "[", "'table'", "]", "=", "self", ".", "table", ".", "dict", "d", "[", "'table'", "]", "[", "'columns'", "]", "=", "[", "aug_col", "(", "c", ")", "for", "c", "in", "self", ".", "table", ".", "columns", "]", "return", "d" ]
Submit the form matching the CSS selector .
def submit_by_selector ( self , selector ) : elem = find_element_by_jquery ( world . browser , selector ) elem . submit ( )
7,058
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L190-L193
[ "def", "update_experiment", "(", ")", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "if", "not", "experiment_dict", ":", "return", "None", "for", "key", "in", "experiment_dict", ".", "keys", "(", ")", ":", "if", "isinstance", "(", "experiment_dict", "[", "key", "]", ",", "dict", ")", ":", "if", "experiment_dict", "[", "key", "]", ".", "get", "(", "'status'", ")", "!=", "'STOPPED'", ":", "nni_config", "=", "Config", "(", "experiment_dict", "[", "key", "]", "[", "'fileName'", "]", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "not", "detect_process", "(", "rest_pid", ")", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "'STOPPED'", ")", "continue", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "startTime", ",", "endTime", "=", "get_experiment_time", "(", "rest_port", ")", "if", "startTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'startTime'", ",", "startTime", ")", "if", "endTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'endTime'", ",", "endTime", ")", "status", "=", "get_experiment_status", "(", "rest_port", ")", "if", "status", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "status", ")" ]
Check the checkbox matching the CSS selector .
def check_by_selector ( self , selector ) : elem = find_element_by_jquery ( world . browser , selector ) if not elem . is_selected ( ) : elem . click ( )
7,059
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L198-L202
[ "def", "_extract", "(", "param_names", ":", "List", "[", "str", "]", ",", "params", ":", "Dict", "[", "str", ",", "mx", ".", "nd", ".", "NDArray", "]", ",", "ext_params", ":", "Dict", "[", "str", ",", "np", ".", "ndarray", "]", ")", "->", "List", "[", "str", "]", ":", "remaining_param_names", "=", "list", "(", "param_names", ")", "for", "name", "in", "param_names", ":", "if", "name", "in", "params", ":", "logger", ".", "info", "(", "\"\\tFound '%s': shape=%s\"", ",", "name", ",", "str", "(", "params", "[", "name", "]", ".", "shape", ")", ")", "ext_params", "[", "name", "]", "=", "params", "[", "name", "]", ".", "asnumpy", "(", ")", "remaining_param_names", ".", "remove", "(", "name", ")", "return", "remaining_param_names" ]
Click the element matching the CSS selector .
def click_by_selector ( self , selector ) : # No need for separate button press step with selector style. elem = find_element_by_jquery ( world . browser , selector ) elem . click ( )
7,060
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L207-L211
[ "def", "prepare_hmet_lsm", "(", "self", ",", "lsm_data_var_map_array", ",", "hmet_ascii_output_folder", "=", "None", ",", "netcdf_file_path", "=", "None", ")", ":", "if", "self", ".", "l2g", "is", "None", ":", "raise", "ValueError", "(", "\"LSM converter not loaded ...\"", ")", "with", "tmp_chdir", "(", "self", ".", "project_manager", ".", "project_directory", ")", ":", "# GSSHA simulation does not work after HMET data is finished", "self", ".", "_update_simulation_end_from_lsm", "(", ")", "# HMET CARDS", "if", "netcdf_file_path", "is", "not", "None", ":", "self", ".", "l2g", ".", "lsm_data_to_subset_netcdf", "(", "netcdf_file_path", ",", "lsm_data_var_map_array", ")", "self", ".", "_update_card", "(", "\"HMET_NETCDF\"", ",", "netcdf_file_path", ",", "True", ")", "self", ".", "project_manager", ".", "deleteCard", "(", "'HMET_ASCII'", ",", "self", ".", "db_session", ")", "else", ":", "if", "\"{0}\"", "in", "hmet_ascii_output_folder", "and", "\"{1}\"", "in", "hmet_ascii_output_folder", ":", "hmet_ascii_output_folder", "=", "hmet_ascii_output_folder", ".", "format", "(", "self", ".", "simulation_start", ".", "strftime", "(", "\"%Y%m%d%H%M\"", ")", ",", "self", ".", "simulation_end", ".", "strftime", "(", "\"%Y%m%d%H%M\"", ")", ")", "self", ".", "l2g", ".", "lsm_data_to_arc_ascii", "(", "lsm_data_var_map_array", ",", "main_output_folder", "=", "os", ".", "path", ".", "join", "(", "self", ".", "gssha_directory", ",", "hmet_ascii_output_folder", ")", ")", "self", ".", "_update_card", "(", "\"HMET_ASCII\"", ",", "os", ".", "path", ".", "join", "(", "hmet_ascii_output_folder", ",", "'hmet_file_list.txt'", ")", ",", "True", ")", "self", ".", "project_manager", ".", "deleteCard", "(", "'HMET_NETCDF'", ",", "self", ".", "db_session", ")", "# UPDATE GMT CARD", "self", ".", "_update_gmt", "(", ")" ]
Navigate to the href of the element matching the CSS selector .
def follow_link_by_selector ( self , selector ) : elem = find_element_by_jquery ( world . browser , selector ) href = elem . get_attribute ( 'href' ) world . browser . get ( href )
7,061
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L216-L224
[ "def", "get_dbs", "(", ")", ":", "url", "=", "posixpath", ".", "join", "(", "config", ".", "db_index_url", ",", "'DBS'", ")", "response", "=", "requests", ".", "get", "(", "url", ")", "dbs", "=", "response", ".", "content", ".", "decode", "(", "'ascii'", ")", ".", "splitlines", "(", ")", "dbs", "=", "[", "re", ".", "sub", "(", "'\\t{2,}'", ",", "'\\t'", ",", "line", ")", ".", "split", "(", "'\\t'", ")", "for", "line", "in", "dbs", "]", "return", "dbs" ]
Assert the option matching the CSS selector is selected .
def is_selected_by_selector ( self , selector ) : elem = find_element_by_jquery ( world . browser , selector ) if not elem . is_selected ( ) : raise AssertionError ( "Element expected to be selected." )
7,062
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L229-L233
[ "def", "startDataStoreMachine", "(", "self", ",", "dataStoreItemName", ",", "machineName", ")", ":", "url", "=", "self", ".", "_url", "+", "\"/items/enterpriseDatabases/%s/machines/%s/start\"", "%", "(", "dataStoreItemName", ",", "machineName", ")", "params", "=", "{", "\"f\"", ":", "\"json\"", "}", "return", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "securityHandler", "=", "self", ".", "_securityHandler", ",", "proxy_url", "=", "self", ".", "_proxy_url", ",", "proxy_port", "=", "self", ".", "_proxy_port", ")" ]
Select the option matching the CSS selector .
def select_by_selector ( self , selector ) : option = find_element_by_jquery ( world . browser , selector ) selectors = find_parents_by_jquery ( world . browser , selector ) if not selectors : raise AssertionError ( "No parent element found for the option." ) selector = selectors [ 0 ] selector . click ( ) sleep ( 0.3 ) option . click ( ) if not option . is_selected ( ) : raise AssertionError ( "Option should have become selected after clicking it." )
7,063
https://github.com/aloetesting/aloe_webdriver/blob/65d847da4bdc63f9c015cb19d4efdee87df8ffad/aloe_webdriver/css.py#L238-L250
[ "def", "startDataStoreMachine", "(", "self", ",", "dataStoreItemName", ",", "machineName", ")", ":", "url", "=", "self", ".", "_url", "+", "\"/items/enterpriseDatabases/%s/machines/%s/start\"", "%", "(", "dataStoreItemName", ",", "machineName", ")", "params", "=", "{", "\"f\"", ":", "\"json\"", "}", "return", "self", ".", "_post", "(", "url", "=", "url", ",", "param_dict", "=", "params", ",", "securityHandler", "=", "self", ".", "_securityHandler", ",", "proxy_url", "=", "self", ".", "_proxy_url", ",", "proxy_port", "=", "self", ".", "_proxy_port", ")" ]
Run the CifFilterCalculation on the CifData input node .
def run_filter_calculation ( self ) : inputs = { 'cif' : self . inputs . cif , 'code' : self . inputs . cif_filter , 'parameters' : self . inputs . cif_filter_parameters , 'metadata' : { 'options' : self . inputs . options . get_dict ( ) , } } calculation = self . submit ( CifFilterCalculation , * * inputs ) self . report ( 'submitted {}<{}>' . format ( CifFilterCalculation . __name__ , calculation . uuid ) ) return ToContext ( cif_filter = calculation )
7,064
https://github.com/aiidateam/aiida-codtools/blob/da5e4259b7a2e86cf0cc3f997e11dd36d445fa94/aiida_codtools/workflows/cif_clean.py#L87-L101
[ "def", "get_active_token", "(", "self", ")", ":", "expire_time", "=", "self", ".", "store_handler", ".", "has_value", "(", "\"expires\"", ")", "access_token", "=", "self", ".", "store_handler", ".", "has_value", "(", "\"access_token\"", ")", "if", "expire_time", "and", "access_token", ":", "expire_time", "=", "self", ".", "store_handler", ".", "get_value", "(", "\"expires\"", ")", "if", "not", "datetime", ".", "now", "(", ")", "<", "datetime", ".", "fromtimestamp", "(", "float", "(", "expire_time", ")", ")", ":", "self", ".", "store_handler", ".", "delete_value", "(", "\"access_token\"", ")", "self", ".", "store_handler", ".", "delete_value", "(", "\"expires\"", ")", "logger", ".", "info", "(", "'Access token expired, going to get new token'", ")", "self", ".", "auth", "(", ")", "else", ":", "logger", ".", "info", "(", "'Access token noy expired yet'", ")", "else", ":", "self", ".", "auth", "(", ")", "return", "self", ".", "store_handler", ".", "get_value", "(", "\"access_token\"", ")" ]
Inspect the result of the CifFilterCalculation, verifying that it produced a CifData output node.
def inspect_filter_calculation ( self ) : try : node = self . ctx . cif_filter self . ctx . cif = node . outputs . cif except exceptions . NotExistent : self . report ( 'aborting: CifFilterCalculation<{}> did not return the required cif output' . format ( node . uuid ) ) return self . exit_codes . ERROR_CIF_FILTER_FAILED
7,065
https://github.com/aiidateam/aiida-codtools/blob/da5e4259b7a2e86cf0cc3f997e11dd36d445fa94/aiida_codtools/workflows/cif_clean.py#L103-L110
[ "def", "apply", "(", "self", ",", "read_tuple_name", ",", "read_tuple_id", "=", "None", ",", "synchronize_widths", "=", "True", ")", ":", "parts", "=", "read_tuple_name", ".", "split", "(", "\"__\"", ")", "parts", "[", "0", "]", "=", "self", ".", "_fill_right", "(", "parts", "[", "0", "]", ",", "\"-\"", ",", "self", ".", "prefix_width", ")", "if", "read_tuple_id", "is", "not", "None", ":", "parts", "[", "1", "]", "=", "\"{:x}\"", ".", "format", "(", "read_tuple_id", ")", "parts", "[", "1", "]", "=", "self", ".", "_fill_left", "(", "parts", "[", "1", "]", ",", "\"0\"", ",", "self", ".", "read_tuple_id_width", ")", "if", "synchronize_widths", ":", "new_segments", "=", "[", "]", "segments", "=", "parts", "[", "2", "]", "[", "1", ":", "-", "1", "]", ".", "split", "(", "\"),(\"", ")", "for", "segment", "in", "segments", ":", "values", "=", "segment", ".", "split", "(", "\",\"", ")", "values", "[", "0", "]", "=", "values", "[", "0", "]", ".", "zfill", "(", "self", ".", "genome_id_width", ")", "values", "[", "1", "]", "=", "values", "[", "1", "]", ".", "zfill", "(", "self", ".", "chr_id_width", ")", "values", "[", "3", "]", "=", "values", "[", "3", "]", ".", "zfill", "(", "self", ".", "coor_width", ")", "values", "[", "4", "]", "=", "values", "[", "4", "]", ".", "zfill", "(", "self", ".", "coor_width", ")", "new_segments", ".", "append", "(", "\"(\"", "+", "\",\"", ".", "join", "(", "values", ")", "+", "\")\"", ")", "parts", "[", "2", "]", "=", "\",\"", ".", "join", "(", "new_segments", ")", "return", "\"__\"", ".", "join", "(", "parts", ")" ]
Run the CifSelectCalculation on the CifData output node of the CifFilterCalculation .
def run_select_calculation ( self ) : inputs = { 'cif' : self . ctx . cif , 'code' : self . inputs . cif_select , 'parameters' : self . inputs . cif_select_parameters , 'metadata' : { 'options' : self . inputs . options . get_dict ( ) , } } calculation = self . submit ( CifSelectCalculation , * * inputs ) self . report ( 'submitted {}<{}>' . format ( CifSelectCalculation . __name__ , calculation . uuid ) ) return ToContext ( cif_select = calculation )
7,066
https://github.com/aiidateam/aiida-codtools/blob/da5e4259b7a2e86cf0cc3f997e11dd36d445fa94/aiida_codtools/workflows/cif_clean.py#L112-L126
[ "def", "get_booking", "(", "request", ")", ":", "booking", "=", "None", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", ":", "try", ":", "booking", "=", "Booking", ".", "objects", ".", "get", "(", "user", "=", "request", ".", "user", ",", "booking_status__slug", "=", "'inprogress'", ")", "except", "Booking", ".", "DoesNotExist", ":", "# The user does not have any open bookings", "pass", "else", ":", "session", "=", "Session", ".", "objects", ".", "get", "(", "session_key", "=", "request", ".", "session", ".", "session_key", ")", "try", ":", "booking", "=", "Booking", ".", "objects", ".", "get", "(", "session", "=", "session", ")", "except", "Booking", ".", "DoesNotExist", ":", "# The user does not have any bookings in his session", "pass", "return", "booking" ]
Inspect the result of the CifSelectCalculation, verifying that it produced a CifData output node.
def inspect_select_calculation ( self ) : try : node = self . ctx . cif_select self . ctx . cif = node . outputs . cif except exceptions . NotExistent : self . report ( 'aborting: CifSelectCalculation<{}> did not return the required cif output' . format ( node . uuid ) ) return self . exit_codes . ERROR_CIF_SELECT_FAILED
7,067
https://github.com/aiidateam/aiida-codtools/blob/da5e4259b7a2e86cf0cc3f997e11dd36d445fa94/aiida_codtools/workflows/cif_clean.py#L128-L135
[ "def", "ensure_compatible_admin", "(", "view", ")", ":", "def", "wrapper", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "user_roles", "=", "request", ".", "user", ".", "user_data", ".", "get", "(", "'roles'", ",", "[", "]", ")", "if", "len", "(", "user_roles", ")", "!=", "1", ":", "context", "=", "{", "'message'", ":", "'I need to be able to manage user accounts. '", "'My username is %s'", "%", "request", ".", "user", ".", "username", "}", "return", "render", "(", "request", ",", "'mtp_common/user_admin/incompatible-admin.html'", ",", "context", "=", "context", ")", "return", "view", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
Parse a StructureData from the cleaned CifData returned by the CifSelectCalculation .
def parse_cif_structure ( self ) : from aiida_codtools . workflows . functions . primitive_structure_from_cif import primitive_structure_from_cif if self . ctx . cif . has_unknown_species : self . ctx . exit_code = self . exit_codes . ERROR_CIF_HAS_UNKNOWN_SPECIES self . report ( self . ctx . exit_code . message ) return if self . ctx . cif . has_undefined_atomic_sites : self . ctx . exit_code = self . exit_codes . ERROR_CIF_HAS_UNDEFINED_ATOMIC_SITES self . report ( self . ctx . exit_code . message ) return if self . ctx . cif . has_attached_hydrogens : self . ctx . exit_code = self . exit_codes . ERROR_CIF_HAS_ATTACHED_HYDROGENS self . report ( self . ctx . exit_code . message ) return parse_inputs = { 'cif' : self . ctx . cif , 'parse_engine' : self . inputs . parse_engine , 'site_tolerance' : self . inputs . site_tolerance , 'symprec' : self . inputs . symprec , } try : structure , node = primitive_structure_from_cif . run_get_node ( * * parse_inputs ) except Exception : # pylint: disable=broad-except self . ctx . exit_code = self . exit_codes . ERROR_CIF_STRUCTURE_PARSING_FAILED self . report ( self . ctx . exit_code . message ) return if node . is_failed : self . ctx . exit_code = self . exit_codes ( node . exit_status ) # pylint: disable=too-many-function-args self . report ( self . ctx . exit_code . message ) else : self . ctx . structure = structure
7,068
https://github.com/aiidateam/aiida-codtools/blob/da5e4259b7a2e86cf0cc3f997e11dd36d445fa94/aiida_codtools/workflows/cif_clean.py#L141-L178
[ "async", "def", "_wait_for_new", "(", "self", ",", "entity_type", ",", "entity_id", ")", ":", "# if the entity is already in the model, just return it", "if", "entity_id", "in", "self", ".", "state", ".", "_live_entity_map", "(", "entity_type", ")", ":", "return", "self", ".", "state", ".", "_live_entity_map", "(", "entity_type", ")", "[", "entity_id", "]", "return", "await", "self", ".", "_wait", "(", "entity_type", ",", "entity_id", ",", "None", ")" ]
If successfully created, add the cleaned CifData and StructureData as output nodes to the workchain.
def results ( self ) : self . out ( 'cif' , self . ctx . cif ) if 'group_cif' in self . inputs : self . inputs . group_cif . add_nodes ( [ self . ctx . cif ] ) if 'group_structure' in self . inputs : try : structure = self . ctx . structure except AttributeError : return self . ctx . exit_code else : self . inputs . group_structure . add_nodes ( [ structure ] ) self . out ( 'structure' , structure ) self . report ( 'workchain finished successfully' )
7,069
https://github.com/aiidateam/aiida-codtools/blob/da5e4259b7a2e86cf0cc3f997e11dd36d445fa94/aiida_codtools/workflows/cif_clean.py#L180-L202
[ "def", "ulocalized_gmt0_time", "(", "self", ",", "time", ",", "context", ",", "request", ")", ":", "value", "=", "get_date", "(", "context", ",", "time", ")", "if", "not", "value", ":", "return", "\"\"", "# DateTime is stored with TimeZone, but DateTimeWidget omits TZ", "value", "=", "value", ".", "toZone", "(", "\"GMT+0\"", ")", "return", "self", ".", "ulocalized_time", "(", "value", ",", "context", ",", "request", ")" ]
Return a Node of a given class and given value .
def get_input_node ( cls , value ) : from aiida import orm if cls in ( orm . Bool , orm . Float , orm . Int , orm . Str ) : result = orm . QueryBuilder ( ) . append ( cls , filters = { 'attributes.value' : value } ) . first ( ) if result is None : node = cls ( value ) . store ( ) else : node = result [ 0 ] elif cls is orm . Dict : result = orm . QueryBuilder ( ) . append ( cls , filters = { 'attributes' : { '==' : value } } ) . first ( ) if result is None : node = cls ( dict = value ) . store ( ) else : node = result [ 0 ] else : raise NotImplementedError return node
7,070
https://github.com/aiidateam/aiida-codtools/blob/da5e4259b7a2e86cf0cc3f997e11dd36d445fa94/aiida_codtools/common/utils.py#L7-L38
[ "def", "skeleton", "(", "files", ",", "metadata", ",", "sqlite_extensions", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "metadata", ")", ":", "click", ".", "secho", "(", "\"File {} already exists, will not over-write\"", ".", "format", "(", "metadata", ")", ",", "bg", "=", "\"red\"", ",", "fg", "=", "\"white\"", ",", "bold", "=", "True", ",", "err", "=", "True", ",", ")", "sys", ".", "exit", "(", "1", ")", "app", "=", "Datasette", "(", "files", ",", "sqlite_extensions", "=", "sqlite_extensions", ")", "databases", "=", "{", "}", "for", "database_name", ",", "info", "in", "app", ".", "inspect", "(", ")", ".", "items", "(", ")", ":", "databases", "[", "database_name", "]", "=", "{", "\"title\"", ":", "None", ",", "\"description\"", ":", "None", ",", "\"description_html\"", ":", "None", ",", "\"license\"", ":", "None", ",", "\"license_url\"", ":", "None", ",", "\"source\"", ":", "None", ",", "\"source_url\"", ":", "None", ",", "\"queries\"", ":", "{", "}", ",", "\"tables\"", ":", "{", "table_name", ":", "{", "\"title\"", ":", "None", ",", "\"description\"", ":", "None", ",", "\"description_html\"", ":", "None", ",", "\"license\"", ":", "None", ",", "\"license_url\"", ":", "None", ",", "\"source\"", ":", "None", ",", "\"source_url\"", ":", "None", ",", "\"units\"", ":", "{", "}", ",", "}", "for", "table_name", "in", "(", "info", ".", "get", "(", "\"tables\"", ")", "or", "{", "}", ")", "}", ",", "}", "open", "(", "metadata", ",", "\"w\"", ")", ".", "write", "(", "json", ".", "dumps", "(", "{", "\"title\"", ":", "None", ",", "\"description\"", ":", "None", ",", "\"description_html\"", ":", "None", ",", "\"license\"", ":", "None", ",", "\"license_url\"", ":", "None", ",", "\"source\"", ":", "None", ",", "\"source_url\"", ":", "None", ",", "\"databases\"", ":", "databases", ",", "}", ",", "indent", "=", "4", ",", ")", ")", "click", ".", "echo", "(", "\"Wrote skeleton to {}\"", ".", "format", "(", "metadata", ")", ")" ]
Bind to the filters form.
def bind ( self , form ) : field = self . field ( default = self . default , * * self . field_kwargs ) form . _fields [ self . name ] = field . bind ( form , self . name , prefix = form . _prefix )
7,071
https://github.com/klen/muffin-admin/blob/404dc8e5107e943b7c42fa21c679c34ddb4de1d5/muffin_admin/filters.py#L33-L36
[ "def", "_set_vibration_nix", "(", "self", ",", "left_motor", ",", "right_motor", ",", "duration", ")", ":", "code", "=", "self", ".", "__get_vibration_code", "(", "left_motor", ",", "right_motor", ",", "duration", ")", "secs", ",", "msecs", "=", "convert_timeval", "(", "time", ".", "time", "(", ")", ")", "outer_event", "=", "struct", ".", "pack", "(", "EVENT_FORMAT", ",", "secs", ",", "msecs", ",", "0x15", ",", "code", ",", "1", ")", "self", ".", "_write_device", ".", "write", "(", "outer_event", ")", "self", ".", "_write_device", ".", "flush", "(", ")" ]
Put together the default configuration path based on the OS.
def get_config_path ( ) : dir_path = ( os . getenv ( 'APPDATA' ) if os . name == "nt" else os . path . expanduser ( '~' ) ) return os . path . join ( dir_path , '.vtjp' )
7,072
https://github.com/persandstrom/python-vasttrafik/blob/9c657fde1e91229c5878ea25530260596d296d37/vasttrafik/__main__.py#L18-L22
[ "def", "to_td", "(", "frame", ",", "name", ",", "con", ",", "if_exists", "=", "'fail'", ",", "time_col", "=", "None", ",", "time_index", "=", "None", ",", "index", "=", "True", ",", "index_label", "=", "None", ",", "chunksize", "=", "10000", ",", "date_format", "=", "None", ")", ":", "database", ",", "table", "=", "name", ".", "split", "(", "'.'", ")", "uploader", "=", "StreamingUploader", "(", "con", ".", "client", ",", "database", ",", "table", ",", "show_progress", "=", "True", ",", "clear_progress", "=", "True", ")", "uploader", ".", "message", "(", "'Streaming import into: {0}.{1}'", ".", "format", "(", "database", ",", "table", ")", ")", "# check existence", "if", "if_exists", "==", "'fail'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "RuntimeError", "(", "'table \"%s\" already exists'", "%", "name", ")", "elif", "if_exists", "==", "'replace'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "pass", "else", ":", "uploader", ".", "message", "(", "'deleting old table...'", ")", "con", ".", "client", ".", "delete_table", "(", "database", ",", "table", ")", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "elif", "if_exists", "==", "'append'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "ValueError", "(", "'invalid value for if_exists: %s'", "%", "if_exists", ")", "# \"time_index\" implies \"index=False\"", "if", "time_index", ":", "index", "=", "None", "# convert", "frame", "=", "frame", ".", "copy", "(", ")", "frame", "=", "_convert_time_column", "(", "frame", ",", "time_col", ",", "time_index", ")", "frame", "=", "_convert_index_column", "(", "frame", ",", "index", ",", "index_label", ")", "frame", "=", "_convert_date_format", "(", "frame", ",", "date_format", ")", "# upload", "uploader", ".", "upload_frame", "(", "frame", ",", "chunksize", ")", "uploader", ".", "wait_for_import", "(", "len", "(", "frame", ")", ")" ]
Print a JSON document as a table
def print_table ( document , * columns ) : headers = [ ] for _ , header in columns : headers . append ( header ) table = [ ] for element in document : row = [ ] for item , _ in columns : if item in element : row . append ( element [ item ] ) else : row . append ( None ) table . append ( row ) print ( tabulate . tabulate ( table , headers ) )
7,073
https://github.com/persandstrom/python-vasttrafik/blob/9c657fde1e91229c5878ea25530260596d296d37/vasttrafik/__main__.py#L43-L57
[ "def", "get_placement_solver", "(", "service_instance", ")", ":", "stub", "=", "salt", ".", "utils", ".", "vmware", ".", "get_new_service_instance_stub", "(", "service_instance", ",", "ns", "=", "'pbm/2.0'", ",", "path", "=", "'/pbm/sdk'", ")", "pbm_si", "=", "pbm", ".", "ServiceInstance", "(", "'ServiceInstance'", ",", "stub", ")", "try", ":", "profile_manager", "=", "pbm_si", ".", "RetrieveContent", "(", ")", ".", "placementSolver", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "return", "profile_manager" ]
Print trip table
def print_trip_table ( document ) : headers = [ 'Alt.' , 'Name' , 'Time' , 'Track' , 'Direction' , 'Dest.' , 'Track' , 'Arrival' ] table = [ ] altnr = 0 for alternative in document : altnr += 1 first_trip_in_alt = True if not isinstance ( alternative [ 'Leg' ] , list ) : alternative [ 'Leg' ] = [ alternative [ 'Leg' ] ] for part in alternative [ 'Leg' ] : orig = part [ 'Origin' ] dest = part [ 'Destination' ] row = [ altnr if first_trip_in_alt else None , part [ 'name' ] , orig [ 'rtTime' ] if 'rtTime' in orig else orig [ 'time' ] , orig [ 'track' ] , part [ 'direction' ] if 'direction' in part else None , dest [ 'name' ] , dest [ 'track' ] , dest [ 'rtTime' ] if 'rtTime' in dest else dest [ 'time' ] , ] table . append ( row ) first_trip_in_alt = False print ( tabulate . tabulate ( table , headers ) )
7,074
https://github.com/persandstrom/python-vasttrafik/blob/9c657fde1e91229c5878ea25530260596d296d37/vasttrafik/__main__.py#L60-L93
[ "def", "get_placement_solver", "(", "service_instance", ")", ":", "stub", "=", "salt", ".", "utils", ".", "vmware", ".", "get_new_service_instance_stub", "(", "service_instance", ",", "ns", "=", "'pbm/2.0'", ",", "path", "=", "'/pbm/sdk'", ")", "pbm_si", "=", "pbm", ".", "ServiceInstance", "(", "'ServiceInstance'", ",", "stub", ")", "try", ":", "profile_manager", "=", "pbm_si", ".", "RetrieveContent", "(", ")", ".", "placementSolver", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "return", "profile_manager" ]
Make a property on an instance of an SLDNode. If cls is omitted, the property is assumed to be a text node with no corresponding class object. If name is omitted, the property is assumed to be a complex node with a corresponding class wrapper.
def makeproperty ( ns , cls = None , name = None , docstring = '' , descendant = True ) : def get_property ( self ) : """ A generic property getter. """ if cls is None : xpath = '%s:%s' % ( ns , name ) else : xpath = '%s:%s' % ( ns , cls . __name__ ) xpath = self . _node . xpath ( xpath , namespaces = SLDNode . _nsmap ) if len ( xpath ) == 1 : if cls is None : return xpath [ 0 ] . text else : elem = cls . __new__ ( cls ) cls . __init__ ( elem , self , descendant = descendant ) return elem else : return None def set_property ( self , value ) : """ A generic property setter. """ if cls is None : xpath = '%s:%s' % ( ns , name ) else : xpath = '%s:%s' % ( ns , cls . __name__ ) xpath = self . _node . xpath ( xpath , namespaces = SLDNode . _nsmap ) if len ( xpath ) == 1 : if cls is None : xpath [ 0 ] . text = value else : xpath [ 0 ] = value . _node else : if cls is None : elem = self . _node . makeelement ( '{%s}%s' % ( SLDNode . _nsmap [ ns ] , name ) , nsmap = SLDNode . _nsmap ) elem . text = value self . _node . append ( elem ) else : self . _node . append ( value . _node ) def del_property ( self ) : """ A generic property deleter. """ if cls is None : xpath = '%s:%s' % ( ns , name ) else : xpath = '%s:%s' % ( ns , cls . __name__ ) xpath = self . _node . xpath ( xpath , namespaces = SLDNode . _nsmap ) if len ( xpath ) == 1 : self . _node . remove ( xpath [ 0 ] ) return property ( get_property , set_property , del_property , docstring )
7,075
https://github.com/azavea/python-sld/blob/70e363782b39249bc9512a78dbbc45aaee52aaf5/sld/__init__.py#L83-L160
[ "def", "_download_segments", "(", "filename", ",", "url", ",", "segments", ")", ":", "gribfile", "=", "open", "(", "filename", ",", "'w'", ")", "for", "start", ",", "end", "in", "segments", ":", "req", "=", "urllib2", ".", "Request", "(", "url", ")", "req", ".", "add_header", "(", "'User-Agent'", ",", "'caelum/0.1 +https://github.com/nrcharles/caelum'", ")", "if", "end", ":", "req", ".", "headers", "[", "'Range'", "]", "=", "'bytes=%s-%s'", "%", "(", "start", ",", "end", ")", "else", ":", "req", ".", "headers", "[", "'Range'", "]", "=", "'bytes=%s'", "%", "(", "start", ")", "opener", "=", "urllib2", ".", "build_opener", "(", ")", "gribfile", ".", "write", "(", "opener", ".", "open", "(", "req", ")", ".", "read", "(", ")", ")", "gribfile", ".", "close", "(", ")" ]
Attempt to get the only child element from this SLDNode. If the node does not exist, create the element, attach it to the DOM, and return the class object that wraps the node.
def get_or_create_element ( self , ns , name ) : if len ( self . _node . xpath ( '%s:%s' % ( ns , name ) , namespaces = SLDNode . _nsmap ) ) == 1 : return getattr ( self , name ) return self . create_element ( ns , name )
7,076
https://github.com/azavea/python-sld/blob/70e363782b39249bc9512a78dbbc45aaee52aaf5/sld/__init__.py#L162-L179
[ "def", "to_csv", "(", "self", ",", "file", ")", ":", "file", ".", "write", "(", "\"time,value\\n\"", ")", "for", "t", ",", "v", "in", "self", ":", "file", ".", "write", "(", "\"%f,%f\\n\"", "%", "(", "t", ",", "v", ")", ")" ]
Create an element as a child of this SLDNode .
def create_element ( self , ns , name ) : elem = self . _node . makeelement ( '{%s}%s' % ( SLDNode . _nsmap [ ns ] , name ) , nsmap = SLDNode . _nsmap ) self . _node . append ( elem ) return getattr ( self , name )
7,077
https://github.com/azavea/python-sld/blob/70e363782b39249bc9512a78dbbc45aaee52aaf5/sld/__init__.py#L181-L196
[ "def", "get_product_info", "(", "self", ",", "apps", "=", "[", "]", ",", "packages", "=", "[", "]", ",", "timeout", "=", "15", ")", ":", "if", "not", "apps", "and", "not", "packages", ":", "return", "message", "=", "MsgProto", "(", "EMsg", ".", "ClientPICSProductInfoRequest", ")", "for", "app", "in", "apps", ":", "app_info", "=", "message", ".", "body", ".", "apps", ".", "add", "(", ")", "app_info", ".", "only_public", "=", "False", "if", "isinstance", "(", "app", ",", "tuple", ")", ":", "app_info", ".", "appid", ",", "app_info", ".", "access_token", "=", "app", "else", ":", "app_info", ".", "appid", "=", "app", "for", "package", "in", "packages", ":", "package_info", "=", "message", ".", "body", ".", "packages", ".", "add", "(", ")", "if", "isinstance", "(", "package", ",", "tuple", ")", ":", "package_info", ".", "appid", ",", "package_info", ".", "access_token", "=", "package", "else", ":", "package_info", ".", "packageid", "=", "package", "message", ".", "body", ".", "meta_data_only", "=", "False", "job_id", "=", "self", ".", "send_job", "(", "message", ")", "data", "=", "dict", "(", "apps", "=", "{", "}", ",", "packages", "=", "{", "}", ")", "while", "True", ":", "chunk", "=", "self", ".", "wait_event", "(", "job_id", ",", "timeout", "=", "timeout", ")", "if", "chunk", "is", "None", ":", "return", "chunk", "=", "chunk", "[", "0", "]", ".", "body", "for", "app", "in", "chunk", ".", "apps", ":", "data", "[", "'apps'", "]", "[", "app", ".", "appid", "]", "=", "vdf", ".", "loads", "(", "app", ".", "buffer", "[", ":", "-", "1", "]", ".", "decode", "(", "'utf-8'", ",", "'replace'", ")", ")", "[", "'appinfo'", "]", "for", "pkg", "in", "chunk", ".", "packages", ":", "data", "[", "'packages'", "]", "[", "pkg", ".", "packageid", "]", "=", "vdf", ".", "binary_loads", "(", "pkg", ".", "buffer", "[", "4", ":", "]", ")", "[", "str", "(", "pkg", ".", "packageid", ")", "]", "if", "not", "chunk", ".", "response_pending", ":", "break", "return", "data" ]
Normalize this node and all rules contained within . The SLD model is modified in place .
def normalize ( self ) : for i , rnode in enumerate ( self . _nodes ) : rule = Rule ( self , i - 1 , descendant = False ) rule . normalize ( )
7,078
https://github.com/azavea/python-sld/blob/70e363782b39249bc9512a78dbbc45aaee52aaf5/sld/__init__.py#L1137-L1144
[ "def", "from_offset", "(", "tu", ",", "file", ",", "offset", ")", ":", "return", "conf", ".", "lib", ".", "clang_getLocationForOffset", "(", "tu", ",", "file", ",", "offset", ")" ]
Validate the current file against the SLD schema. This first normalizes the SLD document, then validates it. Any schema validation error messages are logged at the INFO level.
def validate ( self ) : self . normalize ( ) if self . _node is None : logging . debug ( 'The node is empty, and cannot be validated.' ) return False if self . _schema is None : self . _schema = XMLSchema ( self . _schemadoc ) is_valid = self . _schema . validate ( self . _node ) for msg in self . _schema . error_log : logging . info ( 'Line:%d, Column:%d -- %s' , msg . line , msg . column , msg . message ) return is_valid
7,079
https://github.com/azavea/python-sld/blob/70e363782b39249bc9512a78dbbc45aaee52aaf5/sld/__init__.py#L1488-L1511
[ "def", "unbind", "(", "self", ")", ":", "for", "variable", "in", "self", ".", "variables", ":", "self", ".", "__unbind_variable", "(", "variable", ")", "for", "result", "in", "self", ".", "results", ":", "self", ".", "__unbind_result", "(", "result", ")" ]
Link helper files to the given path
def helper ( path ) : if sys . platform . startswith ( "win" ) : # link batch files src_path = os . path . join ( PHLB_BASE_DIR , "helper_cmd" ) elif sys . platform . startswith ( "linux" ) : # link shell scripts src_path = os . path . join ( PHLB_BASE_DIR , "helper_sh" ) else : print ( "TODO: %s" % sys . platform ) return if not os . path . isdir ( src_path ) : raise RuntimeError ( "Helper script path not found here: '%s'" % src_path ) for entry in scandir ( src_path ) : print ( "_" * 79 ) print ( "Link file: '%s'" % entry . name ) src = entry . path dst = os . path . join ( path , entry . name ) if os . path . exists ( dst ) : print ( "Remove old file '%s'" % dst ) try : os . remove ( dst ) except OSError as err : print ( "\nERROR:\n%s\n" % err ) continue print ( "source.....: '%s'" % src ) print ( "destination: '%s'" % dst ) try : os . link ( src , dst ) except OSError as err : print ( "\nERROR:\n%s\n" % err ) continue
7,080
https://github.com/jedie/PyHardLinkBackup/blob/be28666834d2d9e3d8aac1b661cb2d5bd4056c29/PyHardLinkBackup/phlb_cli.py#L43-L79
[ "def", "get_model_fields", "(", "model", ",", "add_reserver_flag", "=", "True", ")", ":", "import", "uliweb", ".", "orm", "as", "orm", "fields", "=", "[", "]", "m", "=", "{", "'type'", ":", "'type_name'", ",", "'hint'", ":", "'hint'", ",", "'default'", ":", "'default'", ",", "'required'", ":", "'required'", "}", "m1", "=", "{", "'index'", ":", "'index'", ",", "'unique'", ":", "'unique'", "}", "for", "name", ",", "prop", "in", "model", ".", "properties", ".", "items", "(", ")", ":", "if", "name", "==", "'id'", ":", "continue", "d", "=", "{", "}", "for", "k", ",", "v", "in", "m", ".", "items", "(", ")", ":", "d", "[", "k", "]", "=", "getattr", "(", "prop", ",", "v", ")", "for", "k", ",", "v", "in", "m1", ".", "items", "(", ")", ":", "d", "[", "k", "]", "=", "bool", "(", "prop", ".", "kwargs", ".", "get", "(", "v", ")", ")", "d", "[", "'name'", "]", "=", "prop", ".", "fieldname", "or", "name", "d", "[", "'verbose_name'", "]", "=", "unicode", "(", "prop", ".", "verbose_name", ")", "d", "[", "'nullable'", "]", "=", "bool", "(", "prop", ".", "kwargs", ".", "get", "(", "'nullable'", ",", "orm", ".", "__nullable__", ")", ")", "if", "d", "[", "'type'", "]", "in", "(", "'VARCHAR'", ",", "'CHAR'", ",", "'BINARY'", ",", "'VARBINARY'", ")", ":", "d", "[", "'max_length'", "]", "=", "prop", ".", "max_length", "if", "d", "[", "'type'", "]", "in", "(", "'Reference'", ",", "'OneToOne'", ",", "'ManyToMany'", ")", ":", "d", "[", "'reference_class'", "]", "=", "prop", ".", "reference_class", "#collection_name will be _collection_name, it the original value", "d", "[", "'collection_name'", "]", "=", "prop", ".", "_collection_name", "d", "[", "'server_default'", "]", "=", "prop", ".", "kwargs", ".", "get", "(", "'server_default'", ")", "d", "[", "'_reserved'", "]", "=", "True", "fields", ".", "append", "(", "d", ")", "return", "fields" ]
Start a backup run
def backup ( path , name = None ) : from PyHardLinkBackup . phlb . phlb_main import backup backup ( path , name )
7,081
https://github.com/jedie/PyHardLinkBackup/blob/be28666834d2d9e3d8aac1b661cb2d5bd4056c29/PyHardLinkBackup/phlb_cli.py#L104-L108
[ "def", "generate_urls", "(", "self", ",", "first_url", ",", "last_url", ")", ":", "first_url", "=", "first_url", ".", "split", "(", "\"/\"", ")", "last_url", "=", "last_url", ".", "split", "(", "\"/\"", ")", "if", "first_url", "[", "0", "]", ".", "lower", "(", ")", "!=", "\"http:\"", "or", "last_url", "[", "0", "]", ".", "lower", "(", ")", "!=", "\"http:\"", ":", "raise", "Exception", "(", "\"URLs should be accessible via HTTP.\"", ")", "url_base", "=", "\"/\"", ".", "join", "(", "first_url", "[", ":", "-", "1", "]", ")", "start_index", "=", "first_url", "[", "-", "1", "]", ".", "index", "(", "\"a\"", ")", "file_name", "=", "first_url", "[", "-", "1", "]", "[", "0", ":", "start_index", "]", "url_base", "+=", "\"/\"", "+", "file_name", "start", "=", "first_url", "[", "-", "1", "]", "[", "start_index", ":", "]", "finish", "=", "last_url", "[", "-", "1", "]", "[", "start_index", ":", "]", "if", "start", ".", "count", "(", "\".\"", ")", "==", "1", "and", "finish", ".", "count", "(", "\".\"", ")", "==", "1", ":", "start", ",", "file_extension", "=", "start", ".", "split", "(", "\".\"", ")", "finish", ",", "_", "=", "finish", ".", "split", "(", "\".\"", ")", "if", "len", "(", "start", ")", "!=", "len", "(", "finish", ")", ":", "raise", "Exception", "(", "\"Filenames in url should have the same length.\"", ")", "file_extension", "=", "\".\"", "+", "file_extension", "else", ":", "raise", "Exception", "(", "\"URLs does not have the same pattern.\"", ")", "alphabet", "=", "\"abcdefghijklmnopqrstuvwxyz\"", "product", "=", "itertools", ".", "product", "(", "alphabet", ",", "repeat", "=", "len", "(", "start", ")", ")", "urls", "=", "[", "]", "for", "p", "in", "product", ":", "urls", ".", "append", "(", "[", "url_base", "+", "\"\"", ".", "join", "(", "p", ")", "+", "file_extension", "]", ")", "if", "\"\"", ".", "join", "(", "p", ")", "==", "finish", ":", "break", "return", "urls" ]
Verify an existing backup
def verify ( backup_path , fast ) : from PyHardLinkBackup . phlb . verify import verify_backup verify_backup ( backup_path , fast )
7,082
https://github.com/jedie/PyHardLinkBackup/blob/be28666834d2d9e3d8aac1b661cb2d5bd4056c29/PyHardLinkBackup/phlb_cli.py#L120-L124
[ "def", "_get_token", "(", "self", ")", ":", "# HTTP request", "try", ":", "raw_res", "=", "yield", "from", "self", ".", "_session", ".", "get", "(", "TOKEN_URL", ",", "headers", "=", "self", ".", "_headers", ",", "timeout", "=", "self", ".", "_timeout", ")", "except", "OSError", ":", "raise", "PyFidoError", "(", "\"Can not get token\"", ")", "# Research for json in answer", "content", "=", "yield", "from", "raw_res", ".", "text", "(", ")", "reg_res", "=", "re", ".", "search", "(", "r\"\\({.*}\\)\"", ",", "content", ")", "if", "reg_res", "is", "None", ":", "raise", "PyFidoError", "(", "\"Can not finf token json\"", ")", "# Load data as json", "return_data", "=", "json", ".", "loads", "(", "reg_res", ".", "group", "(", ")", "[", "1", ":", "-", "1", "]", ")", "# Get token and uuid", "token", "=", "return_data", ".", "get", "(", "'result'", ",", "{", "}", ")", ".", "get", "(", "'accessToken'", ")", "uuid", "=", "return_data", ".", "get", "(", "'result'", ",", "{", "}", ")", ".", "get", "(", "'userData'", ",", "{", "}", ")", ".", "get", "(", "'uuid'", ")", "# Check values", "if", "token", "is", "None", "or", "uuid", "is", "None", ":", "raise", "PyFidoError", "(", "\"Can not get token or uuid\"", ")", "return", "token", ",", "uuid" ]
Setup procedure .
def setup_package ( ) : import json from setuptools import setup , find_packages filename_setup_json = 'setup.json' filename_description = 'README.md' with open ( filename_setup_json , 'r' ) as handle : setup_json = json . load ( handle ) with open ( filename_description , 'r' ) as handle : description = handle . read ( ) setup ( include_package_data = True , packages = find_packages ( ) , setup_requires = [ 'reentry' ] , reentry_register = True , long_description = description , long_description_content_type = 'text/markdown' , * * setup_json )
7,083
https://github.com/aiidateam/aiida-codtools/blob/da5e4259b7a2e86cf0cc3f997e11dd36d445fa94/setup.py#L5-L26
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Transform an RDFLib object value into a JSON - serializable dict value
def literal_to_dict ( value ) : if isinstance ( value , Literal ) : if value . language is not None : return { "@value" : str ( value ) , "@language" : value . language } return value . toPython ( ) elif isinstance ( value , URIRef ) : return { "@id" : str ( value ) } elif value is None : return None return str ( value )
7,084
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/common/utils/_json_ld.py#L4-L19
[ "def", "_is_path_match", "(", "req_path", ":", "str", ",", "cookie_path", ":", "str", ")", "->", "bool", ":", "if", "not", "req_path", ".", "startswith", "(", "\"/\"", ")", ":", "req_path", "=", "\"/\"", "if", "req_path", "==", "cookie_path", ":", "return", "True", "if", "not", "req_path", ".", "startswith", "(", "cookie_path", ")", ":", "return", "False", "if", "cookie_path", ".", "endswith", "(", "\"/\"", ")", ":", "return", "True", "non_matching", "=", "req_path", "[", "len", "(", "cookie_path", ")", ":", "]", "return", "non_matching", ".", "startswith", "(", "\"/\"", ")" ]
Transforms a JSON - LD ( PyLD ) dictionary value into arguments for an RDFLib Literal
def dict_to_literal ( dict_container : dict ) : if isinstance ( dict_container [ "@value" ] , int ) : return dict_container [ "@value" ] , else : return dict_container [ "@value" ] , dict_container . get ( "@language" , None )
7,085
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/common/utils/_json_ld.py#L22-L28
[ "def", "wait_for_compactions", "(", "self", ",", "timeout", "=", "120", ")", ":", "pattern", "=", "re", ".", "compile", "(", "\"pending tasks: 0\"", ")", "start", "=", "time", ".", "time", "(", ")", "while", "time", ".", "time", "(", ")", "-", "start", "<", "timeout", ":", "output", ",", "err", ",", "rc", "=", "self", ".", "nodetool", "(", "\"compactionstats\"", ")", "if", "pattern", ".", "search", "(", "output", ")", ":", "return", "time", ".", "sleep", "(", "1", ")", "raise", "TimeoutError", "(", "\"{} [{}] Compactions did not finish in {} seconds\"", ".", "format", "(", "time", ".", "strftime", "(", "\"%d %b %Y %H:%M:%S\"", ",", "time", ".", "gmtime", "(", ")", ")", ",", "self", ".", "name", ",", "timeout", ")", ")" ]
Set the paths needed to back up one file . Called for every file in the source directory .
def set_src_filepath ( self , src_dir_path ) : log . debug ( "set_src_filepath() with: '%s'" , src_dir_path ) self . abs_src_filepath = src_dir_path . resolved_path log . debug ( " * abs_src_filepath: %s" % self . abs_src_filepath ) if self . abs_src_filepath is None : log . info ( "Can't resolve source path: %s" , src_dir_path ) return self . sub_filepath = self . abs_src_filepath . relative_to ( self . abs_src_root ) log . debug ( " * sub_filepath: %s" % self . sub_filepath ) self . sub_path = self . sub_filepath . parent log . debug ( " * sub_path: %s" % self . sub_path ) self . filename = self . sub_filepath . name log . debug ( " * filename: %s" % self . filename ) self . abs_dst_path = Path2 ( self . abs_dst_root , self . sub_path ) log . debug ( " * abs_dst_path: %s" % self . abs_dst_path ) self . abs_dst_filepath = Path2 ( self . abs_dst_root , self . sub_filepath ) log . debug ( " * abs_dst_filepath: %s" % self . abs_dst_filepath ) self . abs_dst_hash_filepath = Path2 ( "%s%s%s" % ( self . abs_dst_filepath , os . extsep , phlb_config . hash_name ) ) log . debug ( " * abs_dst_hash_filepath: %s" % self . abs_dst_hash_filepath )
7,086
https://github.com/jedie/PyHardLinkBackup/blob/be28666834d2d9e3d8aac1b661cb2d5bd4056c29/PyHardLinkBackup/phlb/path_helper.py#L109-L140
[ "def", "rtt_control", "(", "self", ",", "command", ",", "config", ")", ":", "config_byref", "=", "ctypes", ".", "byref", "(", "config", ")", "if", "config", "is", "not", "None", "else", "None", "res", "=", "self", ".", "_dll", ".", "JLINK_RTTERMINAL_Control", "(", "command", ",", "config_byref", ")", "if", "res", "<", "0", ":", "raise", "errors", ".", "JLinkRTTException", "(", "res", ")", "return", "res" ]
Use a Pycryptodome cipher factory to encrypt data .
def _cryptodome_encrypt ( cipher_factory , plaintext , key , iv ) : encryptor = cipher_factory ( key , iv ) return encryptor . encrypt ( plaintext )
7,087
https://github.com/etingof/pysnmpcrypto/blob/9b92959f5e2fce833fa220343ca12add3134a77c/pysnmpcrypto/__init__.py#L36-L49
[ "def", "get_default_frame", "(", "self", ")", ":", "if", "self", ".", "settings", ".", "terrainalt", "==", "'Auto'", ":", "if", "self", ".", "get_mav_param", "(", "'TERRAIN_FOLLOW'", ",", "0", ")", "==", "1", ":", "return", "mavutil", ".", "mavlink", ".", "MAV_FRAME_GLOBAL_TERRAIN_ALT", "return", "mavutil", ".", "mavlink", ".", "MAV_FRAME_GLOBAL_RELATIVE_ALT", "if", "self", ".", "settings", ".", "terrainalt", "==", "'True'", ":", "return", "mavutil", ".", "mavlink", ".", "MAV_FRAME_GLOBAL_TERRAIN_ALT", "return", "mavutil", ".", "mavlink", ".", "MAV_FRAME_GLOBAL_RELATIVE_ALT" ]
Use a Pycryptodome cipher factory to decrypt data .
def _cryptodome_decrypt ( cipher_factory , ciphertext , key , iv ) : decryptor = cipher_factory ( key , iv ) return decryptor . decrypt ( ciphertext )
7,088
https://github.com/etingof/pysnmpcrypto/blob/9b92959f5e2fce833fa220343ca12add3134a77c/pysnmpcrypto/__init__.py#L52-L65
[ "def", "get_default_frame", "(", "self", ")", ":", "if", "self", ".", "settings", ".", "terrainalt", "==", "'Auto'", ":", "if", "self", ".", "get_mav_param", "(", "'TERRAIN_FOLLOW'", ",", "0", ")", "==", "1", ":", "return", "mavutil", ".", "mavlink", ".", "MAV_FRAME_GLOBAL_TERRAIN_ALT", "return", "mavutil", ".", "mavlink", ".", "MAV_FRAME_GLOBAL_RELATIVE_ALT", "if", "self", ".", "settings", ".", "terrainalt", "==", "'True'", ":", "return", "mavutil", ".", "mavlink", ".", "MAV_FRAME_GLOBAL_TERRAIN_ALT", "return", "mavutil", ".", "mavlink", ".", "MAV_FRAME_GLOBAL_RELATIVE_ALT" ]
Use a cryptography cipher factory to encrypt data .
def _cryptography_encrypt ( cipher_factory , plaintext , key , iv ) : encryptor = cipher_factory ( key , iv ) . encryptor ( ) return encryptor . update ( plaintext ) + encryptor . finalize ( )
7,089
https://github.com/etingof/pysnmpcrypto/blob/9b92959f5e2fce833fa220343ca12add3134a77c/pysnmpcrypto/__init__.py#L68-L81
[ "def", "getOverlayMouseScale", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayMouseScale", "pvecMouseScale", "=", "HmdVector2_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "pvecMouseScale", ")", ")", "return", "result", ",", "pvecMouseScale" ]
Use a cryptography cipher factory to decrypt data .
def _cryptography_decrypt ( cipher_factory , ciphertext , key , iv ) : decryptor = cipher_factory ( key , iv ) . decryptor ( ) return decryptor . update ( ciphertext ) + decryptor . finalize ( )
7,090
https://github.com/etingof/pysnmpcrypto/blob/9b92959f5e2fce833fa220343ca12add3134a77c/pysnmpcrypto/__init__.py#L84-L97
[ "def", "getOverlayMouseScale", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayMouseScale", "pvecMouseScale", "=", "HmdVector2_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "pvecMouseScale", ")", ")", "return", "result", ",", "pvecMouseScale" ]
Encrypt data using the available backend .
def generic_encrypt ( cipher_factory_map , plaintext , key , iv ) : if backend is None : raise PysnmpCryptoError ( 'Crypto backend not available' ) return _ENCRYPT_MAP [ backend ] ( cipher_factory_map [ backend ] , plaintext , key , iv )
7,091
https://github.com/etingof/pysnmpcrypto/blob/9b92959f5e2fce833fa220343ca12add3134a77c/pysnmpcrypto/__init__.py#L110-L125
[ "def", "replace_attribute", "(", "module_name", ",", "attribute_name", ",", "new_value", ",", "dry_run", "=", "True", ")", ":", "init_file", "=", "'%s/__init__.py'", "%", "module_name", "_", ",", "tmp_file", "=", "tempfile", ".", "mkstemp", "(", ")", "with", "open", "(", "init_file", ")", "as", "input_file", ":", "with", "open", "(", "tmp_file", ",", "'w'", ")", "as", "output_file", ":", "for", "line", "in", "input_file", ":", "if", "line", ".", "startswith", "(", "attribute_name", ")", ":", "line", "=", "\"%s = '%s'\\n\"", "%", "(", "attribute_name", ",", "new_value", ")", "output_file", ".", "write", "(", "line", ")", "if", "not", "dry_run", ":", "Path", "(", "tmp_file", ")", ".", "copy", "(", "init_file", ")", "else", ":", "log", ".", "info", "(", "diff", "(", "tmp_file", ",", "init_file", ",", "retcode", "=", "None", ")", ")" ]
Decrypt data using the available backend .
def generic_decrypt ( cipher_factory_map , ciphertext , key , iv ) : if backend is None : raise PysnmpCryptoError ( 'Crypto backend not available' ) return _DECRYPT_MAP [ backend ] ( cipher_factory_map [ backend ] , ciphertext , key , iv )
7,092
https://github.com/etingof/pysnmpcrypto/blob/9b92959f5e2fce833fa220343ca12add3134a77c/pysnmpcrypto/__init__.py#L128-L143
[ "def", "replace_attribute", "(", "module_name", ",", "attribute_name", ",", "new_value", ",", "dry_run", "=", "True", ")", ":", "init_file", "=", "'%s/__init__.py'", "%", "module_name", "_", ",", "tmp_file", "=", "tempfile", ".", "mkstemp", "(", ")", "with", "open", "(", "init_file", ")", "as", "input_file", ":", "with", "open", "(", "tmp_file", ",", "'w'", ")", "as", "output_file", ":", "for", "line", "in", "input_file", ":", "if", "line", ".", "startswith", "(", "attribute_name", ")", ":", "line", "=", "\"%s = '%s'\\n\"", "%", "(", "attribute_name", ",", "new_value", ")", "output_file", ".", "write", "(", "line", ")", "if", "not", "dry_run", ":", "Path", "(", "tmp_file", ")", ".", "copy", "(", "init_file", ")", "else", ":", "log", ".", "info", "(", "diff", "(", "tmp_file", ",", "init_file", ",", "retcode", "=", "None", ")", ")" ]
format disks as xfs and mount them
def _prepare_disks ( self , disks_name ) : fstab = '/etc/fstab' for disk in tqdm ( disks_name . split ( ',' ) ) : sudo ( 'umount /dev/{0}' . format ( disk ) , warn_only = True ) if sudo ( 'mkfs.xfs -f /dev/{0}' . format ( disk ) , warn_only = True ) . failed : sudo ( 'apt-get update' ) sudo ( 'apt-get -y install xfsprogs' ) sudo ( 'mkfs.xfs -f /dev/{0}' . format ( disk ) ) sudo ( 'mkdir -p /srv/node/{0}' . format ( disk ) ) files . append ( fstab , '/dev/{0} /srv/node/{1} xfs noatime,nodiratime,nobarrier,logbufs=8 0 2' . format ( disk , disk ) , use_sudo = True ) sudo ( 'mount /srv/node/{0}' . format ( disk ) )
7,093
https://github.com/jiasir/playback/blob/58b2a5d669dcfaa8cad50c544a4b068dcacf9b69/playback/swift_storage.py#L260-L272
[ "def", "connection", "(", "cls", ")", ":", "local", "=", "cls", ".", "_threadlocal", "if", "not", "getattr", "(", "local", ",", "'connection'", ",", "None", ")", ":", "# Make sure these variables are no longer affected by other threads.", "local", ".", "user", "=", "cls", ".", "user", "local", ".", "password", "=", "cls", ".", "password", "local", ".", "site", "=", "cls", ".", "site", "local", ".", "timeout", "=", "cls", ".", "timeout", "local", ".", "headers", "=", "cls", ".", "headers", "local", ".", "format", "=", "cls", ".", "format", "local", ".", "version", "=", "cls", ".", "version", "local", ".", "url", "=", "cls", ".", "url", "if", "cls", ".", "site", "is", "None", ":", "raise", "ValueError", "(", "\"No shopify session is active\"", ")", "local", ".", "connection", "=", "ShopifyConnection", "(", "cls", ".", "site", ",", "cls", ".", "user", ",", "cls", ".", "password", ",", "cls", ".", "timeout", ",", "cls", ".", "format", ")", "return", "local", ".", "connection" ]
Load a Sudoku from file .
def load_file ( cls , file_path ) : with open ( os . path . abspath ( file_path ) , 'rt' ) as f : s = Sudoku ( f . read ( ) . strip ( ) ) return s
7,094
https://github.com/hbldh/dlxsudoku/blob/8d774e0883eb615533d04f07e58a95db716226e0/dlxsudoku/sudoku.py#L55-L67
[ "def", "_from_dict", "(", "cls", ",", "_dict", ")", ":", "args", "=", "{", "}", "if", "'request'", "in", "_dict", ":", "args", "[", "'request'", "]", "=", "MessageRequest", ".", "_from_dict", "(", "_dict", ".", "get", "(", "'request'", ")", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'request\\' not present in Log JSON'", ")", "if", "'response'", "in", "_dict", ":", "args", "[", "'response'", "]", "=", "MessageResponse", ".", "_from_dict", "(", "_dict", ".", "get", "(", "'response'", ")", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'response\\' not present in Log JSON'", ")", "if", "'log_id'", "in", "_dict", ":", "args", "[", "'log_id'", "]", "=", "_dict", ".", "get", "(", "'log_id'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'log_id\\' not present in Log JSON'", ")", "if", "'request_timestamp'", "in", "_dict", ":", "args", "[", "'request_timestamp'", "]", "=", "_dict", ".", "get", "(", "'request_timestamp'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'request_timestamp\\' not present in Log JSON'", ")", "if", "'response_timestamp'", "in", "_dict", ":", "args", "[", "'response_timestamp'", "]", "=", "_dict", ".", "get", "(", "'response_timestamp'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'response_timestamp\\' not present in Log JSON'", ")", "if", "'workspace_id'", "in", "_dict", ":", "args", "[", "'workspace_id'", "]", "=", "_dict", ".", "get", "(", "'workspace_id'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'workspace_id\\' not present in Log JSON'", ")", "if", "'language'", "in", "_dict", ":", "args", "[", "'language'", "]", "=", "_dict", ".", "get", "(", "'language'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'language\\' not present in Log JSON'", ")", "return", "cls", "(", "*", "*", "args", ")" ]
Parses a Sudoku instance from string input .
def _parse_from_string ( string_input ) : # Check if comment line is present. read_lines = list ( filter ( None , string_input . split ( '\n' ) ) ) if read_lines [ 0 ] . startswith ( '#' ) : comment = read_lines . pop ( 0 ) else : comment = '' if len ( read_lines ) > 1 : # Assume that Sudoku is defined over several rows. order = int ( math . sqrt ( len ( read_lines ) ) ) else : # Sudoku is defined on one line. order = int ( math . sqrt ( math . sqrt ( len ( read_lines [ 0 ] ) ) ) ) read_lines = filter ( lambda x : len ( x ) == ( order ** 2 ) , [ read_lines [ 0 ] [ i : ( i + order ** 2 ) ] for i in utils . range_ ( len ( read_lines [ 0 ] ) ) if i % ( order ** 2 ) == 0 ] ) matrix = utils . get_list_of_lists ( order ** 2 , order ** 2 , fill_with = 0 ) for i , line in enumerate ( read_lines ) : line = line . strip ( ) for j , value in enumerate ( line ) : if value . isdigit ( ) and int ( value ) : matrix [ i ] [ j ] = int ( value ) else : matrix [ i ] [ j ] = 0 return order , comment , matrix
7,095
https://github.com/hbldh/dlxsudoku/blob/8d774e0883eb615533d04f07e58a95db716226e0/dlxsudoku/sudoku.py#L70-L104
[ "def", "_wait_for_files", "(", "path", ")", ":", "timeout", "=", "0.001", "remaining", "=", "[", "]", "while", "timeout", "<", "1.0", ":", "remaining", "=", "[", "]", "if", "os", ".", "path", ".", "isdir", "(", "path", ")", ":", "L", "=", "os", ".", "listdir", "(", "path", ")", "for", "target", "in", "L", ":", "_remaining", "=", "_wait_for_files", "(", "target", ")", "if", "_remaining", ":", "remaining", ".", "extend", "(", "_remaining", ")", "continue", "try", ":", "os", ".", "unlink", "(", "path", ")", "except", "FileNotFoundError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "return", "except", "(", "OSError", ",", "IOError", ",", "PermissionError", ")", ":", "time", ".", "sleep", "(", "timeout", ")", "timeout", "*=", "2", "remaining", ".", "append", "(", "path", ")", "else", ":", "return", "return", "remaining" ]
Get an iterator over all rows in the Sudoku
def row_iter ( self ) : for k in utils . range_ ( self . side ) : yield self . row ( k )
7,096
https://github.com/hbldh/dlxsudoku/blob/8d774e0883eb615533d04f07e58a95db716226e0/dlxsudoku/sudoku.py#L147-L150
[ "def", "compare", "(", "left", ":", "Optional", "[", "L", "]", ",", "right", ":", "Optional", "[", "R", "]", ")", "->", "'Comparison[L, R]'", ":", "if", "isinstance", "(", "left", ",", "File", ")", "and", "isinstance", "(", "right", ",", "Directory", ")", ":", "return", "FileDirectoryComparison", "(", "left", ",", "right", ")", "if", "isinstance", "(", "left", ",", "Directory", ")", "and", "isinstance", "(", "right", ",", "File", ")", ":", "return", "DirectoryFileComparison", "(", "left", ",", "right", ")", "if", "isinstance", "(", "left", ",", "File", ")", "or", "isinstance", "(", "right", ",", "File", ")", ":", "return", "FileComparison", "(", "left", ",", "right", ")", "if", "isinstance", "(", "left", ",", "Directory", ")", "or", "isinstance", "(", "right", ",", "Directory", ")", ":", "return", "DirectoryComparison", "(", "left", ",", "right", ")", "raise", "TypeError", "(", "f'Cannot compare entities: {left}, {right}'", ")" ]
Get an iterator over all columns in the Sudoku
def col_iter ( self ) : for k in utils . range_ ( self . side ) : yield self . col ( k )
7,097
https://github.com/hbldh/dlxsudoku/blob/8d774e0883eb615533d04f07e58a95db716226e0/dlxsudoku/sudoku.py#L156-L159
[ "def", "approx_eq", "(", "val", ":", "Any", ",", "other", ":", "Any", ",", "*", ",", "atol", ":", "Union", "[", "int", ",", "float", "]", "=", "1e-8", ")", "->", "bool", ":", "# Check if val defines approximate equality via _approx_eq_. This takes", "# precedence over all other overloads.", "approx_eq_getter", "=", "getattr", "(", "val", ",", "'_approx_eq_'", ",", "None", ")", "if", "approx_eq_getter", "is", "not", "None", ":", "result", "=", "approx_eq_getter", "(", "other", ",", "atol", ")", "if", "result", "is", "not", "NotImplemented", ":", "return", "result", "# The same for other to make approx_eq symmetric.", "other_approx_eq_getter", "=", "getattr", "(", "other", ",", "'_approx_eq_'", ",", "None", ")", "if", "other_approx_eq_getter", "is", "not", "None", ":", "result", "=", "other_approx_eq_getter", "(", "val", ",", "atol", ")", "if", "result", "is", "not", "NotImplemented", ":", "return", "result", "# Compare primitive types directly.", "if", "isinstance", "(", "val", ",", "(", "int", ",", "float", ")", ")", ":", "if", "not", "isinstance", "(", "other", ",", "(", "int", ",", "float", ")", ")", ":", "return", "False", "return", "_isclose", "(", "val", ",", "other", ",", "atol", "=", "atol", ")", "if", "isinstance", "(", "val", ",", "complex", ")", ":", "if", "not", "isinstance", "(", "other", ",", "complex", ")", ":", "return", "False", "return", "_isclose", "(", "val", ",", "other", ",", "atol", "=", "atol", ")", "# Try to compare source and target recursively, assuming they're iterable.", "result", "=", "_approx_eq_iterables", "(", "val", ",", "other", ",", "atol", "=", "atol", ")", "# Fallback to __eq__() when anything else fails.", "if", "result", "is", "NotImplemented", ":", "return", "val", "==", "other", "return", "result" ]
Get the values of the box pertaining to the specified row and column of the Sudoku
def box ( self , row , col ) : box = [ ] box_i = ( row // self . order ) * self . order box_j = ( col // self . order ) * self . order for i in utils . range_ ( box_i , box_i + self . order ) : for j in utils . range_ ( box_j , box_j + self . order ) : box . append ( self [ i ] [ j ] ) return box
7,098
https://github.com/hbldh/dlxsudoku/blob/8d774e0883eb615533d04f07e58a95db716226e0/dlxsudoku/sudoku.py#L161-L169
[ "def", "_GetDelayImportTimestamps", "(", "self", ",", "pefile_object", ")", ":", "delay_import_timestamps", "=", "[", "]", "if", "not", "hasattr", "(", "pefile_object", ",", "'DIRECTORY_ENTRY_DELAY_IMPORT'", ")", ":", "return", "delay_import_timestamps", "for", "importdata", "in", "pefile_object", ".", "DIRECTORY_ENTRY_DELAY_IMPORT", ":", "dll_name", "=", "importdata", ".", "dll", "try", ":", "dll_name", "=", "dll_name", ".", "decode", "(", "'ascii'", ")", "except", "UnicodeDecodeError", ":", "dll_name", "=", "dll_name", ".", "decode", "(", "'ascii'", ",", "errors", "=", "'replace'", ")", "timestamp", "=", "getattr", "(", "importdata", ".", "struct", ",", "'dwTimeStamp'", ",", "0", ")", "delay_import_timestamps", ".", "append", "(", "[", "dll_name", ",", "timestamp", "]", ")", "return", "delay_import_timestamps" ]
Get an iterator over all boxes in the Sudoku
def box_iter ( self ) : for i in utils . range_ ( self . order ) : for j in utils . range_ ( self . order ) : yield self . box ( i * self . order , j * self . order )
7,099
https://github.com/hbldh/dlxsudoku/blob/8d774e0883eb615533d04f07e58a95db716226e0/dlxsudoku/sudoku.py#L171-L175
[ "def", "QRatio", "(", "s1", ",", "s2", ",", "force_ascii", "=", "True", ",", "full_process", "=", "True", ")", ":", "if", "full_process", ":", "p1", "=", "utils", ".", "full_process", "(", "s1", ",", "force_ascii", "=", "force_ascii", ")", "p2", "=", "utils", ".", "full_process", "(", "s2", ",", "force_ascii", "=", "force_ascii", ")", "else", ":", "p1", "=", "s1", "p2", "=", "s2", "if", "not", "utils", ".", "validate_string", "(", "p1", ")", ":", "return", "0", "if", "not", "utils", ".", "validate_string", "(", "p2", ")", ":", "return", "0", "return", "ratio", "(", "p1", ",", "p2", ")" ]