query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Filter a list of DirEntryPath instances with the given pattern
def iter_filtered_dir_entry(dir_entries, match_patterns, on_skip):
    """
    Filter a list of DirEntryPath instances with the given patterns.

    :param dir_entries: iterable of os.DirEntry-like objects to wrap and filter
    :param match_patterns: iterable of patterns passed to Path.match()
    :param on_skip: callback(dir_entry_path, match_pattern) invoked for every skipped entry
    :return: generator yielding DirEntryPath instances, or None for entries that
        matched a skip pattern
    """
    def match(dir_entry_path, match_patterns, on_skip):
        # True (after notifying on_skip) on the first pattern that matches.
        for match_pattern in match_patterns:
            if dir_entry_path.path_instance.match(match_pattern):
                on_skip(dir_entry_path, match_pattern)
                return True
        return False

    for entry in dir_entries:
        try:
            dir_entry_path = DirEntryPath(entry)
        except FileNotFoundError as err:
            # e.g.: A file was deleted after the first filesystem scan
            # Will be obsolete if we use shadow-copy / snapshot function from filesystem
            # see: https://github.com/jedie/PyHardLinkBackup/issues/6
            log.error("Can't make DirEntryPath() instance: %s" % err)
            continue

        if match(dir_entry_path, match_patterns, on_skip):
            # Yield a placeholder so callers can still count skipped entries.
            yield None
        else:
            yield dir_entry_path
7,400
https://github.com/jedie/PyHardLinkBackup/blob/be28666834d2d9e3d8aac1b661cb2d5bd4056c29/PyHardLinkBackup/phlb/filesystem_walk.py#L85-L118
[ "async", "def", "send_frame", "(", "self", ",", "frame", ")", ":", "if", "not", "self", ".", "connection", ".", "connected", ":", "await", "self", ".", "connect", "(", ")", "await", "self", ".", "update_version", "(", ")", "await", "set_utc", "(", "pyvlx", "=", "self", ")", "await", "house_status_monitor_enable", "(", "pyvlx", "=", "self", ")", "self", ".", "connection", ".", "write", "(", "frame", ")" ]
Parses headers to create a pagination objects
def parse_pagination(headers):
    """
    Parse response headers to create a pagination object.

    :param headers: mapping of HTTP response headers; the "Link" header is parsed
    :return: _Navigation with previous / next / last / current / first page
        values (None when the corresponding rel is absent)
    """
    # NOTE(review): if a rel is present but its href has no "page" query
    # parameter, links[rel] is None and the [0] below would raise — presumably
    # the upstream API always includes "page"; confirm against callers.
    links = {
        link.rel: parse_qs(link.href).get("page", None)
        for link in link_header.parse(headers.get("Link", "")).links
    }
    return _Navigation(
        links.get("previous", [None])[0],
        links.get("next", [None])[0],
        links.get("last", [None])[0],
        links.get("current", [None])[0],
        links.get("first", [None])[0]
    )
7,401
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/common/utils/_http.py#L10-L28
[ "def", "addJsonDirectory", "(", "self", ",", "directory", ",", "test", "=", "None", ")", ":", "for", "filename", "in", "os", ".", "listdir", "(", "directory", ")", ":", "try", ":", "fullPath", "=", "os", ".", "path", ".", "join", "(", "directory", ",", "filename", ")", "if", "not", "test", "or", "test", "(", "filename", ",", "fullPath", ")", ":", "with", "open", "(", "fullPath", ")", "as", "f", ":", "jsonData", "=", "json", ".", "load", "(", "f", ")", "name", ",", "_", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "self", ".", "addSource", "(", "name", ",", "jsonData", ")", "except", "ValueError", ":", "continue" ]
Parse a URI into a Route namedtuple
def parse_uri(uri, endpoint_uri):
    """
    Parse a URI into a Route namedtuple.

    :param uri: URI to split into path and query parts
    :param endpoint_uri: base URI the path is joined against
    :return: _Route(absolute path, parsed query dict)
    """
    parsed = urlparse(uri)
    absolute_path = urljoin(endpoint_uri, parsed.path)
    query_params = parse_qs(parsed.query)
    return _Route(absolute_path, query_params)
7,402
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/common/utils/_http.py#L31-L45
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Build a Pycryptodome AES Cipher object .
def _cryptodome_cipher(key, iv):
    """
    Build a Pycryptodome AES cipher object.

    :param key: AES key bytes
    :param iv: initialization vector bytes
    :return: AES cipher in CFB mode with a 128-bit segment size
    """
    return AES.new(key, AES.MODE_CFB, iv, segment_size=128)
7,403
https://github.com/etingof/pysnmpcrypto/blob/9b92959f5e2fce833fa220343ca12add3134a77c/pysnmpcrypto/aes.py#L17-L24
[ "def", "notify_duration_exceeded", "(", "self", ",", "participants", ",", "reference_time", ")", ":", "for", "participant", "in", "participants", ":", "participant", ".", "status", "=", "\"rejected\"", "session", ".", "commit", "(", ")" ]
Build a cryptography AES Cipher object .
def _cryptography_cipher(key, iv):
    """
    Build a cryptography AES Cipher object.

    :param key: AES key bytes
    :param iv: initialization vector bytes
    :return: Cipher object in CFB mode using the default backend
    """
    return Cipher(
        algorithm=algorithms.AES(key),
        mode=modes.CFB(iv),
        backend=default_backend()
    )
7,404
https://github.com/etingof/pysnmpcrypto/blob/9b92959f5e2fce833fa220343ca12add3134a77c/pysnmpcrypto/aes.py#L27-L39
[ "def", "delete_group", "(", "self", ",", "name", ")", ":", "group", "=", "self", ".", "get_group", "(", "name", ")", "method", ",", "url", "=", "get_URL", "(", "'group_delete'", ")", "payload", "=", "{", "'apikey'", ":", "self", ".", "config", ".", "get", "(", "'apikey'", ")", ",", "'logintoken'", ":", "self", ".", "session", ".", "cookies", ".", "get", "(", "'logintoken'", ")", ",", "'contactgroupid'", ":", "group", "[", "'contactgroupid'", "]", "}", "res", "=", "getattr", "(", "self", ".", "session", ",", "method", ")", "(", "url", ",", "params", "=", "payload", ")", "if", "res", ".", "status_code", "==", "200", ":", "return", "True", "hellraiser", "(", "res", ")" ]
Create an XML Node
def make_xml_node(graph, name, close=False, attributes=None, text="", complete=False, innerXML=""):
    """
    Create an XML node string.

    :param graph: rdflib graph whose namespace manager resolves ``name`` to a qname
    :param name: URI of the node name
    :param close: when True (and complete is False), return only a closing tag
    :param attributes: optional dict of attribute name -> value
    :param text: text content, XML-escaped before insertion
    :param innerXML: raw XML appended after the escaped text (not escaped)
    :param complete: when True, return a full <tag>text</tag> element
    :return: XML string
    """
    name = graph.namespace_manager.qname(name)
    if complete:
        if attributes is not None:
            return "<{0} {1}>{2}{3}</{0}>".format(
                name,
                " ".join(
                    ["{}=\"{}\"".format(attr_name, attr_value)
                     for attr_name, attr_value in attributes.items()]
                ),
                escape(text),
                innerXML
            )
        return "<{0}>{1}{2}</{0}>".format(name, escape(text), innerXML)
    elif close is True:
        return "</{}>".format(name)
    elif attributes is not None:
        # NOTE(review): attribute values are not escaped here — presumably
        # callers only pass safe values; confirm.
        return "<{} {}>".format(
            name,
            " ".join(
                ["{}=\"{}\"".format(attr_name, attr_value)
                 for attr_name, attr_value in attributes.items()]
            )
        )
    return "<{}>".format(name)
7,405
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/common/utils/xml.py#L27-L67
[ "def", "find_best_frametype", "(", "channel", ",", "start", ",", "end", ",", "frametype_match", "=", "None", ",", "allow_tape", "=", "True", ",", "connection", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ")", ":", "try", ":", "return", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'error'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "except", "RuntimeError", ":", "# gaps (or something else went wrong)", "ftout", "=", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "return_all", "=", "True", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'ignore'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "try", ":", "if", "isinstance", "(", "ftout", ",", "dict", ")", ":", "return", "{", "key", ":", "ftout", "[", "key", "]", "[", "0", "]", "for", "key", "in", "ftout", "}", "return", "ftout", "[", "0", "]", "except", "IndexError", ":", "raise", "ValueError", "(", "\"Cannot find any valid frametypes for channel(s)\"", ")" ]
Perform an XPath on an element and indicate if we need to loop over it to find something
def performXpath(parent, xpath):
    """
    Perform an XPath on an element and indicate if we need to loop over it
    to find something.

    :param parent: lxml element to query
    :param xpath: XPath expression (may start with ".//")
    :return: tuple (first matching element, loop flag); loop is True when the
        ".//" expression only matched through the "*[...]" fallback, i.e. the
        caller has to keep descending
    :raises IndexError: when nothing matches (result[0] on an empty list)
    """
    loop = False
    if xpath.startswith(".//"):
        # Try the direct-child form of the descendant expression first.
        result = parent.xpath(
            xpath.replace(".//", "./", 1),
            namespaces=XPATH_NAMESPACES
        )
        if len(result) == 0:
            # Fall back to any child that contains the expression deeper down.
            result = parent.xpath(
                "*[{}]".format(xpath),
                namespaces=XPATH_NAMESPACES
            )
            loop = True
    else:
        result = parent.xpath(xpath, namespaces=XPATH_NAMESPACES)
    return result[0], loop
7,406
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/common/utils/xml.py#L129-L153
[ "def", "scan_resource", "(", "self", ",", "pkg", ",", "path", ")", ":", "for", "fname", "in", "resource_listdir", "(", "pkg", ",", "path", ")", ":", "if", "fname", ".", "endswith", "(", "TABLE_EXT", ")", ":", "table_path", "=", "posixpath", ".", "join", "(", "path", ",", "fname", ")", "with", "contextlib", ".", "closing", "(", "resource_stream", "(", "pkg", ",", "table_path", ")", ")", "as", "stream", ":", "self", ".", "add_colortable", "(", "stream", ",", "posixpath", ".", "splitext", "(", "posixpath", ".", "basename", "(", "fname", ")", ")", "[", "0", "]", ")" ]
Copy an XML Node
def copyNode(node, children=False, parent=False):
    """
    Copy an XML node.

    :param node: lxml element to copy (tag and attributes are kept)
    :param children: when True, also copy the text and child elements
    :param parent: when an element is given, attach the copy to it via
        SubElement; when False, create a free-standing Element
    :return: the new element
    """
    if parent is not False:
        element = SubElement(
            parent,
            node.tag,
            attrib=node.attrib,
            nsmap={None: "http://www.tei-c.org/ns/1.0"}
        )
    else:
        element = Element(
            node.tag,
            attrib=node.attrib,
            nsmap={None: "http://www.tei-c.org/ns/1.0"}
        )
    if children:
        if node.text:
            # _setText is lxml-internal; sets the element's text content.
            element._setText(node.text)
        for child in xmliter(node):
            element.append(copy(child))
    return element
7,407
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/common/utils/xml.py#L156-L182
[ "def", "find_best_frametype", "(", "channel", ",", "start", ",", "end", ",", "frametype_match", "=", "None", ",", "allow_tape", "=", "True", ",", "connection", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ")", ":", "try", ":", "return", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'error'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "except", "RuntimeError", ":", "# gaps (or something else went wrong)", "ftout", "=", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "return_all", "=", "True", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'ignore'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "try", ":", "if", "isinstance", "(", "ftout", ",", "dict", ")", ":", "return", "{", "key", ":", "ftout", "[", "key", "]", "[", "0", "]", "for", "key", "in", "ftout", "}", "return", "ftout", "[", "0", "]", "except", "IndexError", ":", "raise", "ValueError", "(", "\"Cannot find any valid frametypes for channel(s)\"", ")" ]
Normalize XPATH split around slashes
def normalizeXpath(xpath):
    """
    Normalize an XPath that was split around slashes.

    Empty components (produced by "//" in the original string) are folded
    into the following component as a leading "/".

    :param xpath: list of string components from splitting an XPath on "/"
    :return: normalized list of components
    """
    normalized = []
    previous = None
    for index, component in enumerate(xpath):
        if index > 0 and not previous:
            # The previous component was empty: this one follows a "//".
            normalized.append("/" + component)
        elif component:
            normalized.append(component)
        previous = component
    return normalized
7,408
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/common/utils/xml.py#L185-L199
[ "def", "make_random_models_table", "(", "n_sources", ",", "param_ranges", ",", "random_state", "=", "None", ")", ":", "prng", "=", "check_random_state", "(", "random_state", ")", "sources", "=", "Table", "(", ")", "for", "param_name", ",", "(", "lower", ",", "upper", ")", "in", "param_ranges", ".", "items", "(", ")", ":", "# Generate a column for every item in param_ranges, even if it", "# is not in the model (e.g. flux). However, such columns will", "# be ignored when rendering the image.", "sources", "[", "param_name", "]", "=", "prng", ".", "uniform", "(", "lower", ",", "upper", ",", "n_sources", ")", "return", "sources" ]
Loop over passages to construct and increment new tree given a parent and XPaths
def passageLoop(parent, new_tree, xpath1, xpath2=None, preceding_siblings=False, following_siblings=False):
    """
    Loop over passages to construct and increment a new tree given a parent
    and XPaths.

    :param parent: element in the source tree to search
    :param new_tree: element in the target tree receiving the copies
    :param xpath1: first XPath (split via __formatXpath__ into a head and a queue)
    :param xpath2: optional second XPath marking the end of a range
    :param preceding_siblings: copy siblings before the matched node
    :param following_siblings: copy siblings after the matched node
    :return: new_tree
    """
    current_1, queue_1 = __formatXpath__(xpath1)
    if xpath2 is None:  # In case we need what is following or preceding our node
        result_1, loop = performXpath(parent, current_1)
        if loop is True:
            # The match was only found deeper down: keep the full xpath queued.
            queue_1 = xpath1

        central = None
        has_no_queue = len(queue_1) == 0
        # For each sibling, when we need them in the context of a range
        if preceding_siblings or following_siblings:
            for sibling in xmliter(parent):
                if sibling == result_1:
                    central = True
                    # We copy the node we looked for (Result_1)
                    child = copyNode(result_1, children=has_no_queue, parent=new_tree)
                    # if we don't have children
                    # we loop over the passage child
                    if not has_no_queue:
                        passageLoop(
                            result_1,
                            child,
                            queue_1,
                            None,
                            preceding_siblings=preceding_siblings,
                            following_siblings=following_siblings
                        )
                    # If we were waiting for preceding_siblings, we break it off
                    # As we don't need to go further
                    if preceding_siblings:
                        break
                elif not central and preceding_siblings:
                    copyNode(sibling, parent=new_tree, children=True)
                elif central and following_siblings:
                    copyNode(sibling, parent=new_tree, children=True)
    else:
        result_1, loop = performXpath(parent, current_1)
        if loop is True:
            queue_1 = xpath1
            if xpath2 == xpath1:
                # Same expression on both ends: reuse the first resolution.
                current_2, queue_2 = current_1, queue_1
            else:
                current_2, queue_2 = __formatXpath__(xpath2)
        else:
            current_2, queue_2 = __formatXpath__(xpath2)

        if xpath1 != xpath2:
            result_2, loop = performXpath(parent, current_2)
            if loop is True:
                queue_2 = xpath2
        else:
            result_2 = result_1

        if result_1 == result_2:
            # Start and end resolve to the same node: copy it and recurse.
            has_no_queue = len(queue_1) == 0
            child = copyNode(result_1, children=has_no_queue, parent=new_tree)
            if not has_no_queue:
                passageLoop(result_1, child, queue_1, queue_2)
        else:
            start = False
            # For each sibling
            for sibling in xmliter(parent):
                # If we have found start
                # We copy the node because we are between start and end
                if start:
                    # If we are at the end
                    # We break the copy
                    if sibling == result_2:
                        break
                    else:
                        copyNode(sibling, parent=new_tree, children=True)
                # If this is start
                # Then we copy it and initiate start
                elif sibling == result_1:
                    start = True
                    has_no_queue_1 = len(queue_1) == 0
                    node = copyNode(sibling, children=has_no_queue_1, parent=new_tree)
                    if not has_no_queue_1:
                        passageLoop(sibling, node, queue_1, None, following_siblings=True)

            # Finally copy the end node (and recurse into it if needed).
            continue_loop = len(queue_2) == 0
            node = copyNode(result_2, children=continue_loop, parent=new_tree)
            if not continue_loop:
                passageLoop(result_2, node, queue_2, None, preceding_siblings=True)
    return new_tree
7,409
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/common/utils/xml.py#L202-L304
[ "def", "start", "(", "self", ")", ":", "self", ".", "publish_properties", "(", ")", "self", ".", "subscribe_topics", "(", ")", "gc", ".", "collect", "(", ")", "self", ".", "set_state", "(", "\"ready\"", ")", "while", "True", ":", "try", ":", "if", "not", "utils", ".", "wlan", ".", "isconnected", "(", ")", ":", "utils", ".", "wifi_connect", "(", ")", "# publish device data", "self", ".", "publish_data", "(", ")", "# check for new mqtt messages", "self", ".", "mqtt", ".", "check_msg", "(", ")", "idle", "(", ")", "sleep", "(", "1", ")", "except", "KeyboardInterrupt", ":", "self", ".", "set_state", "(", "\"disconnected\"", ")", "self", ".", "mqtt", ".", "disconnect", "(", ")" ]
Return label for given lang or any default
def get_label(self, lang=None):
    """
    Return the label for the given language, or any available default.

    :param lang: language code to prefer; when None, the first label found
        is returned
    :return: a label literal matching lang if possible, otherwise the last
        label seen, or None when the graph holds no label at all
    """
    x = None
    if lang is None:
        # No preference: return the first label the graph yields.
        for obj in self.graph.objects(self.asNode(), RDFS.label):
            return obj
    for obj in self.graph.objects(self.asNode(), RDFS.label):
        x = obj
        if x.language == lang:
            return x
    # Fall back to the last label seen (or None).
    return x
7,410
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/prototypes/metadata.py#L125-L140
[ "def", "run", "(", "self", ")", ":", "self", ".", "output", "(", "\"* NDVI processing started.\"", ",", "normal", "=", "True", ")", "bands", "=", "self", ".", "_read_bands", "(", ")", "image_data", "=", "self", ".", "_get_image_data", "(", ")", "new_bands", "=", "[", "]", "for", "i", "in", "range", "(", "0", ",", "2", ")", ":", "new_bands", ".", "append", "(", "numpy", ".", "empty", "(", "image_data", "[", "'shape'", "]", ",", "dtype", "=", "numpy", ".", "float32", ")", ")", "self", ".", "_warp", "(", "image_data", ",", "bands", ",", "new_bands", ")", "# Bands are no longer needed", "del", "bands", "calc_band", "=", "numpy", ".", "true_divide", "(", "(", "new_bands", "[", "1", "]", "-", "new_bands", "[", "0", "]", ")", ",", "(", "new_bands", "[", "1", "]", "+", "new_bands", "[", "0", "]", ")", ")", "output_band", "=", "numpy", ".", "rint", "(", "(", "calc_band", "+", "1", ")", "*", "255", "/", "2", ")", ".", "astype", "(", "numpy", ".", "uint8", ")", "output_file", "=", "join", "(", "self", ".", "dst_path", ",", "self", ".", "_filename", "(", "suffix", "=", "'NDVI'", ")", ")", "return", "self", ".", "write_band", "(", "output_band", ",", "output_file", ",", "image_data", ")" ]
Iterator to find parents of current collection from closest to furthest
def parents(self) -> List["Collection"]:
    """
    List the ancestors of the current collection, from closest to furthest.

    :return: list of parent collections, immediate parent first
    """
    def _climb(node):
        # Walk the parent chain until it runs out.
        while node is not None:
            yield node
            node = node.parent

    return list(_climb(self.parent))
7,411
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/prototypes/metadata.py#L162-L172
[ "def", "_do_http", "(", "opts", ",", "profile", "=", "'default'", ")", ":", "ret", "=", "{", "}", "url", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:url'", ".", "format", "(", "profile", ")", ",", "''", ")", "user", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:user'", ".", "format", "(", "profile", ")", ",", "''", ")", "passwd", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:pass'", ".", "format", "(", "profile", ")", ",", "''", ")", "realm", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:realm'", ".", "format", "(", "profile", ")", ",", "''", ")", "timeout", "=", "__salt__", "[", "'config.get'", "]", "(", "'modjk:{0}:timeout'", ".", "format", "(", "profile", ")", ",", "''", ")", "if", "not", "url", ":", "raise", "Exception", "(", "'missing url in profile {0}'", ".", "format", "(", "profile", ")", ")", "if", "user", "and", "passwd", ":", "auth", "=", "_auth", "(", "url", "=", "url", ",", "realm", "=", "realm", ",", "user", "=", "user", ",", "passwd", "=", "passwd", ")", "_install_opener", "(", "auth", ")", "url", "+=", "'?{0}'", ".", "format", "(", "_urlencode", "(", "opts", ")", ")", "for", "line", "in", "_urlopen", "(", "url", ",", "timeout", "=", "timeout", ")", ".", "read", "(", ")", ".", "splitlines", "(", ")", ":", "splt", "=", "line", ".", "split", "(", "'='", ",", "1", ")", "if", "splt", "[", "0", "]", "in", "ret", ":", "ret", "[", "splt", "[", "0", "]", "]", "+=", "',{0}'", ".", "format", "(", "splt", "[", "1", "]", ")", "else", ":", "ret", "[", "splt", "[", "0", "]", "]", "=", "splt", "[", "1", "]", "return", "ret" ]
Does not add member if it already knows it .
def _add_member ( self , member ) : if member . id in self . children : return None else : self . children [ member . id ] = member
7,412
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/prototypes/metadata.py#L196-L206
[ "def", "prepare_renderable", "(", "request", ",", "test_case_result", ",", "is_admin", ")", ":", "test_case", "=", "test_case_result", ".", "test_case", "file_directory", "=", "request", ".", "registry", ".", "settings", "[", "'file_directory'", "]", "sha1", "=", "test_case_result", ".", "diff", ".", "sha1", "if", "test_case_result", ".", "diff", "else", "None", "kwargs", "=", "{", "'number'", ":", "test_case", ".", "id", ",", "'group'", ":", "test_case", ".", "testable", ".", "name", ",", "'name'", ":", "test_case", ".", "name", ",", "'points'", ":", "test_case", ".", "points", ",", "'status'", ":", "test_case_result", ".", "status", ",", "'extra'", ":", "test_case_result", ".", "extra", "}", "if", "test_case", ".", "output_type", "==", "'image'", ":", "url", "=", "request", ".", "route_path", "(", "'file_item'", ",", "filename", "=", "'_'", ",", "_query", "=", "{", "'raw'", ":", "1", "}", ",", "sha1sum", "=", "sha1", ")", "if", "sha1", "else", "None", "return", "ImageOutput", "(", "url", "=", "url", ",", "*", "*", "kwargs", ")", "elif", "test_case", ".", "output_type", "==", "'text'", ":", "content", "=", "None", "if", "sha1", ":", "with", "open", "(", "File", ".", "file_path", "(", "file_directory", ",", "sha1", ")", ")", "as", "fp", ":", "content", "=", "fp", ".", "read", "(", ")", "return", "TextOutput", "(", "content", "=", "content", ",", "*", "*", "kwargs", ")", "elif", "not", "test_case_result", ".", "diff", ":", "# Outputs match", "return", "DiffWithMetadata", "(", "diff", "=", "None", ",", "*", "*", "kwargs", ")", "try", ":", "with", "open", "(", "File", ".", "file_path", "(", "file_directory", ",", "sha1", ")", ")", "as", "fp", ":", "diff", "=", "pickle", ".", "load", "(", "fp", ")", "except", "(", "AttributeError", ",", "EOFError", ")", ":", "content", "=", "'submit system mismatch -- requeue submission'", "content", "+=", "traceback", ".", "format_exc", "(", "1", ")", "return", "TextOutput", "(", "content", "=", "content", ",", "*", "*", "kwargs", 
")", "except", "Exception", ":", "content", "=", "'unexected error -- requeue submission\\n'", "content", "+=", "traceback", ".", "format_exc", "(", "1", ")", "return", "TextOutput", "(", "content", "=", "content", ",", "*", "*", "kwargs", ")", "diff", ".", "hide_expected", "=", "not", "is_admin", "and", "test_case", ".", "hide_expected", "return", "DiffWithMetadata", "(", "diff", "=", "diff", ",", "*", "*", "kwargs", ")" ]
Export the base DTS information in a simple reusable way
def export_base_dts(cls, graph, obj, nsm):
    """
    Export the base DTS information in a simple reusable way.

    :param graph: rdflib graph holding the object's metadata
    :param obj: collection object exposing asNode(), type, get_label() and size
    :param nsm: namespace manager used to shorten URIs to qnames
    :return: dict with @id, @type, hydra title / totalItems and, when present,
        a hydra description taken from the graph
    """
    o = {
        "@id": str(obj.asNode()),
        "@type": nsm.qname(obj.type),
        nsm.qname(RDF_NAMESPACES.HYDRA.title): str(obj.get_label()),
        nsm.qname(RDF_NAMESPACES.HYDRA.totalItems): obj.size
    }
    for desc in graph.objects(obj.asNode(), RDF_NAMESPACES.HYDRA.description):
        # If several descriptions exist, each assignment overwrites the
        # previous one — the last description in iteration order wins.
        o[nsm.qname(RDF_NAMESPACES.HYDRA.description)] = str(desc)
    return o
7,413
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/prototypes/metadata.py#L306-L325
[ "def", "open", "(", "self", ",", "file_path", ")", ":", "if", "self", ".", "is_opened", "(", ")", "and", "self", ".", "workbook", ".", "file_path", "==", "file_path", ":", "self", ".", "_logger", ".", "logger", ".", "debug", "(", "\"workbook already opened: {}\"", ".", "format", "(", "self", ".", "workbook", ".", "file_path", ")", ")", "return", "self", ".", "close", "(", ")", "self", ".", "_open", "(", "file_path", ")" ]
Get the subject of the object
def get_subject(self, lang=None):
    """
    Get the DC subject of the object.

    :param lang: language code passed through to the metadata lookup
    :return: single metadata value stored under DC.subject
    """
    return self.metadata.get_single(key=DC.subject, lang=lang)
7,414
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/prototypes/metadata.py#L488-L495
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
Context sent to templates for rendering include the form s cleaned data and also the current Request object .
def get_context(self):
    """
    Build the template rendering context.

    The context contains the form's cleaned data plus the current Request
    object under the "request" key.

    :return: context dict
    :raises ValueError: when the form has not validated
    """
    if self.is_valid():
        return dict(request=self.request, **self.cleaned_data)
    raise ValueError("Cannot generate Context when form is invalid.")
7,415
https://github.com/madisona/django-contact-form/blob/0800034a7231f35a3d5b5cd73968e6115b9ce01c/contact_form/forms.py#L24-L31
[ "def", "expected_value", "(", "operator", ",", "eig_values", ",", "eig_states", ",", "beta", ")", ":", "aux", "=", "np", ".", "einsum", "(", "'i,ji,ji'", ",", "np", ".", "exp", "(", "-", "beta", "*", "eig_values", ")", ",", "eig_states", ",", "operator", ".", "dot", "(", "eig_states", ")", ")", "return", "aux", "/", "partition_func", "(", "beta", ",", "eig_values", ")" ]
calculate pressure from anharmonicity for Zharkov equation the equation is from Dorogokupets 2015
def zharkov_panh(v, temp, v0, a0, m, n, z, t_ref=300., three_r=3. * constants.R):
    """
    Calculate pressure from anharmonicity for the Zharkov equation.

    The equation is from Dorogokupets 2015.

    :param v: unit-cell volume (converted to a molar volume via vol_uc2mol)
    :param temp: temperature
    :param v0: reference volume
    :param a0: anharmonicity parameter at v / v0 = 1
    :param m: exponent of the volume dependence of the anharmonicity parameter
    :param n: multiplier on 3R — presumably atoms per formula unit, TODO confirm
    :param z: number of formula units per unit cell (passed to vol_uc2mol)
    :param t_ref: reference temperature, default 300.
    :param three_r: 3R, default three times the gas constant
    :return: anharmonic pressure difference between temp and t_ref
    """
    v_mol = vol_uc2mol(v, z)
    x = v / v0
    # Volume-scaled anharmonicity parameter.
    a = a0 * np.power(x, m)

    def f(t):
        # Pressure term at temperature t; the 1.e-9 factor presumably
        # converts Pa to GPa — confirm against callers.
        return three_r * n / 2. * a * m / v_mol * np.power(t, 2.) * 1.e-9

    return f(temp) - f(t_ref)
7,416
https://github.com/SHDShim/pytheos/blob/be079624405e92fbec60c5ead253eb5917e55237/pytheos/eqn_anharmonic.py#L6-L29
[ "def", "index_split", "(", "index", ",", "chunks", ")", ":", "Ntotal", "=", "index", ".", "shape", "[", "0", "]", "Nsections", "=", "int", "(", "chunks", ")", "if", "Nsections", "<=", "0", ":", "raise", "ValueError", "(", "'number sections must be larger than 0.'", ")", "Neach_section", ",", "extras", "=", "divmod", "(", "Ntotal", ",", "Nsections", ")", "section_sizes", "=", "(", "[", "0", "]", "+", "extras", "*", "[", "Neach_section", "+", "1", "]", "+", "(", "Nsections", "-", "extras", ")", "*", "[", "Neach_section", "]", ")", "div_points", "=", "numpy", ".", "array", "(", "section_sizes", ")", ".", "cumsum", "(", ")", "sub_ind", "=", "[", "]", "for", "i", "in", "range", "(", "Nsections", ")", ":", "st", "=", "div_points", "[", "i", "]", "end", "=", "div_points", "[", "i", "+", "1", "]", "sub_ind", ".", "append", "(", "index", "[", "st", ":", "end", "]", ")", "return", "sub_ind" ]
Return the list of words contained in a line .
def split_words(line):
    """
    Return the list of words contained in a line.

    Camel-cased words are split apart first, then every fragment is passed
    through normalize().
    """
    # Insert a space at each lower→upper camel-case boundary.
    decameled = _NORM_REGEX.sub(r'\1 \2', line)
    return list(map(normalize, _WORD_REGEX.split(decameled)))
7,417
https://github.com/lyda/misspell-check/blob/f8c5d67a5ffaeb0a7101efd5a4ace81c73955efa/misspellings_lib.py#L26-L30
[ "def", "_save_files", "(", "self", ",", "data", ",", "dtype_out_time", ")", ":", "path", "=", "self", ".", "path_out", "[", "dtype_out_time", "]", "if", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "dir_out", ")", ":", "os", ".", "makedirs", "(", "self", ".", "dir_out", ")", "if", "'reg'", "in", "dtype_out_time", ":", "try", ":", "reg_data", "=", "xr", ".", "open_dataset", "(", "path", ")", "except", "(", "EOFError", ",", "RuntimeError", ",", "IOError", ")", ":", "reg_data", "=", "xr", ".", "Dataset", "(", ")", "reg_data", ".", "update", "(", "data", ")", "data_out", "=", "reg_data", "else", ":", "data_out", "=", "data", "if", "isinstance", "(", "data_out", ",", "xr", ".", "DataArray", ")", ":", "data_out", "=", "xr", ".", "Dataset", "(", "{", "self", ".", "name", ":", "data_out", "}", ")", "data_out", ".", "to_netcdf", "(", "path", ",", "engine", "=", "'netcdf4'", ",", "format", "=", "'NETCDF3_64BIT'", ")" ]
Adds files to check .
def add(self, files):
    """
    Add files to check.

    :param files: a single filename (str) or an iterable of filenames
    """
    # isinstance is the idiomatic type test and, unlike comparing
    # __class__.__name__ to 'str', also accepts str subclasses.
    if isinstance(files, str):
        self._files.append(files)
    else:
        self._files.extend(files)
7,418
https://github.com/lyda/misspell-check/blob/f8c5d67a5ffaeb0a7101efd5a4ace81c73955efa/misspellings_lib.py#L67-L76
[ "def", "load_stream", "(", "self", ",", "stream", ")", ":", "batches", "=", "super", "(", "ArrowStreamPandasSerializer", ",", "self", ")", ".", "load_stream", "(", "stream", ")", "import", "pyarrow", "as", "pa", "for", "batch", "in", "batches", ":", "yield", "[", "self", ".", "arrow_to_pandas", "(", "c", ")", "for", "c", "in", "pa", ".", "Table", ".", "from_batches", "(", "[", "batch", "]", ")", ".", "itercolumns", "(", ")", "]" ]
Checks the files for misspellings .
def check(self):
    """
    Check the files for misspellings.

    :return: tuple (errors, results) where errors is a list of I/O error
        messages and results is a list of [filename, line_number, word]
        entries for every word found in the misspelling dictionary
    """
    errors = []
    results = []
    for fn in self._files:
        if not os.path.isdir(fn):
            try:
                with open(fn, 'r') as f:
                    line_ct = 1
                    for line in f:
                        for word in split_words(line):
                            # Look the word up both as-is and lower-cased.
                            if (word in self._misspelling_dict or
                                    word.lower() in self._misspelling_dict):
                                results.append([fn, line_ct, word])
                        line_ct += 1
            except UnicodeDecodeError:
                # Skip files that are not valid text.
                pass
            except IOError:
                # Record the error message but keep checking other files.
                errors.append('%s' % sys.exc_info()[1])
    return errors, results
7,419
https://github.com/lyda/misspell-check/blob/f8c5d67a5ffaeb0a7101efd5a4ace81c73955efa/misspellings_lib.py#L78-L104
[ "def", "remove_server", "(", "self", ",", "server_id", ")", ":", "# Validate server_id", "server", "=", "self", ".", "_get_server", "(", "server_id", ")", "# Delete any instances we recorded to be cleaned up", "if", "server_id", "in", "self", ".", "_owned_subscriptions", ":", "inst_list", "=", "self", ".", "_owned_subscriptions", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_subscriptions", "[", "server_id", "]", "if", "server_id", "in", "self", ".", "_owned_filters", ":", "inst_list", "=", "self", ".", "_owned_filters", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_filters", "[", "server_id", "]", "if", "server_id", "in", "self", ".", "_owned_destinations", ":", "inst_list", "=", "self", ".", "_owned_destinations", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_destinations", "[", "server_id", "]", "# Remove server from this listener", "del", "self", ".", "_servers", "[", "server_id", "]" ]
Returns a list of suggestions for a misspelled word .
def suggestions(self, word):
    """
    Return a sorted list of suggested corrections for a misspelled word.

    Candidates are gathered for both the word as given and its lower-cased
    form, then re-cased to match the original word.
    """
    exact = self._misspelling_dict.get(word, [])
    lowered = self._misspelling_dict.get(word.lower(), [])
    candidates = set(exact) | set(lowered)
    return sorted(same_case(source=word, destination=c) for c in candidates)
7,420
https://github.com/lyda/misspell-check/blob/f8c5d67a5ffaeb0a7101efd5a4ace81c73955efa/misspellings_lib.py#L106-L118
[ "def", "setSignalHeaders", "(", "self", ",", "signalHeaders", ")", ":", "for", "edfsignal", "in", "np", ".", "arange", "(", "self", ".", "n_channels", ")", ":", "self", ".", "channels", "[", "edfsignal", "]", "=", "signalHeaders", "[", "edfsignal", "]", "self", ".", "update_header", "(", ")" ]
Returns a list of misspelled words and corrections .
def dump_misspelling_list(self):
    """
    Return every known misspelling paired with each of its corrections.

    :return: list of [misspelled_word, correction] pairs, ordered by
        misspelled word
    """
    return [
        [bad_word, correction]
        for bad_word in sorted(self._misspelling_dict)
        for correction in self._misspelling_dict[bad_word]
    ]
7,421
https://github.com/lyda/misspell-check/blob/f8c5d67a5ffaeb0a7101efd5a4ace81c73955efa/misspellings_lib.py#L120-L126
[ "def", "centred_timegrid", "(", "cls", ",", "simulationstep", ")", ":", "simulationstep", "=", "Period", "(", "simulationstep", ")", "return", "Timegrid", "(", "cls", ".", "_STARTDATE", "+", "simulationstep", "/", "2", ",", "cls", ".", "_ENDDATE", "+", "simulationstep", "/", "2", ",", "simulationstep", ")" ]
Get the status of Alerting Service
def status(self):
    """
    Get the status of the Alerting Service.

    Renames the upstream keys 'Implementation-Version' and
    'Built-From-Git-SHA1' to snake_case before wrapping the response.

    :return: Status object built from the normalized response dict
    :raises KeyError: if the response lacks either expected key (pop)
    """
    orig_dict = self._get(self._service_url('status'))
    orig_dict['implementation_version'] = orig_dict.pop('Implementation-Version')
    orig_dict['built_from_git_sha1'] = orig_dict.pop('Built-From-Git-SHA1')
    return Status(orig_dict)
7,422
https://github.com/hawkular/hawkular-client-python/blob/52371f9ebabbe310efee2a8ff8eb735ccc0654bb/hawkular/alerts/common.py#L81-L90
[ "def", "read", "(", "self", ",", "num_bytes", ")", ":", "while", "len", "(", "self", ".", "decoded", ")", "<", "num_bytes", ":", "try", ":", "tag", ",", "data", "=", "next", "(", "self", ".", "chunks", ")", "except", "StopIteration", ":", "raise", "EOFError", "(", ")", "if", "tag", "!=", "b'IDAT'", ":", "continue", "self", ".", "decoded", "+=", "self", ".", "decompressor", ".", "decompress", "(", "data", ")", "r", "=", "self", ".", "decoded", "[", ":", "num_bytes", "]", "self", ".", "decoded", "=", "self", ".", "decoded", "[", "num_bytes", ":", "]", "return", "r" ]
Query or manipulate smother reports
def cli ( ctx , report , semantic , rcfile ) : ctx . obj = { 'report' : report , 'semantic' : semantic , 'rcfile' : rcfile , }
7,423
https://github.com/ChrisBeaumont/smother/blob/65d1ea6ae0060d213b0dcbb983c5aa8e7fee07bb/smother/cli.py#L27-L35
[ "def", "data2md", "(", "table", ")", ":", "table", "=", "copy", ".", "deepcopy", "(", "table", ")", "table", "=", "ensure_table_strings", "(", "table", ")", "table", "=", "multis_2_mono", "(", "table", ")", "table", "=", "add_cushions", "(", "table", ")", "widths", "=", "[", "]", "for", "column", "in", "range", "(", "len", "(", "table", "[", "0", "]", ")", ")", ":", "widths", ".", "append", "(", "get_column_width", "(", "column", ",", "table", ")", ")", "output", "=", "'|'", "for", "i", "in", "range", "(", "len", "(", "table", "[", "0", "]", ")", ")", ":", "output", "=", "''", ".", "join", "(", "[", "output", ",", "center_line", "(", "widths", "[", "i", "]", ",", "table", "[", "0", "]", "[", "i", "]", ")", ",", "'|'", "]", ")", "output", "=", "output", "+", "'\\n|'", "for", "i", "in", "range", "(", "len", "(", "table", "[", "0", "]", ")", ")", ":", "output", "=", "''", ".", "join", "(", "[", "output", ",", "center_line", "(", "widths", "[", "i", "]", ",", "\"-\"", "*", "widths", "[", "i", "]", ")", ",", "'|'", "]", ")", "output", "=", "output", "+", "'\\n|'", "for", "row", "in", "range", "(", "1", ",", "len", "(", "table", ")", ")", ":", "for", "column", "in", "range", "(", "len", "(", "table", "[", "row", "]", ")", ")", ":", "output", "=", "''", ".", "join", "(", "[", "output", ",", "center_line", "(", "widths", "[", "column", "]", ",", "table", "[", "row", "]", "[", "column", "]", ")", ",", "'|'", "]", ")", "output", "=", "output", "+", "'\\n|'", "split", "=", "output", ".", "split", "(", "'\\n'", ")", "split", ".", "pop", "(", ")", "table_string", "=", "'\\n'", ".", "join", "(", "split", ")", "return", "table_string" ]
Determine which tests intersect a source interval .
def lookup ( ctx , path ) : regions = parse_intervals ( path , as_context = ctx . obj [ 'semantic' ] ) _report_from_regions ( regions , ctx . obj )
7,424
https://github.com/ChrisBeaumont/smother/blob/65d1ea6ae0060d213b0dcbb983c5aa8e7fee07bb/smother/cli.py#L48-L53
[ "def", "embedding", "(", "indices", ",", "vocab_dim", ",", "output_dim", ",", "variable_dtype", ",", "name", "=", "\"embedding\"", ")", ":", "weights", "=", "embedding_weights", "(", "indices", ".", "mesh", ",", "vocab_dim", ",", "output_dim", ",", "variable_dtype", ",", "name", ")", "return", "mtf", ".", "gather", "(", "weights", ",", "indices", ",", "vocab_dim", ")" ]
Determine which tests intersect a git diff .
def diff ( ctx , branch ) : diff = GitDiffReporter ( branch ) regions = diff . changed_intervals ( ) _report_from_regions ( regions , ctx . obj , file_factory = diff . old_file )
7,425
https://github.com/ChrisBeaumont/smother/blob/65d1ea6ae0060d213b0dcbb983c5aa8e7fee07bb/smother/cli.py#L59-L65
[ "def", "InputCodon", "(", "seq_length", ",", "ignore_stop_codons", "=", "True", ",", "name", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "ignore_stop_codons", ":", "vocab", "=", "CODONS", "else", ":", "vocab", "=", "CODONS", "+", "STOP_CODONS", "assert", "seq_length", "%", "3", "==", "0", "return", "Input", "(", "(", "seq_length", "/", "3", ",", "len", "(", "vocab", ")", ")", ",", "name", "=", "name", ",", "*", "*", "kwargs", ")" ]
Combine several smother reports .
def combine ( ctx , src , dst ) : c = coverage . Coverage ( config_file = ctx . obj [ 'rcfile' ] ) result = Smother ( c ) for infile in src : result |= Smother . load ( infile ) result . write ( dst )
7,426
https://github.com/ChrisBeaumont/smother/blob/65d1ea6ae0060d213b0dcbb983c5aa8e7fee07bb/smother/cli.py#L72-L82
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Converts all file paths in a smother report to relative paths relative to the current directory .
def convert_to_relative_paths ( src , dst ) : result = Smother . convert_to_relative_paths ( Smother . load ( src ) ) result . write ( dst )
7,427
https://github.com/ChrisBeaumont/smother/blob/65d1ea6ae0060d213b0dcbb983c5aa8e7fee07bb/smother/cli.py#L88-L94
[ "def", "group_by", "(", "self", ",", "key_selector", "=", "identity", ",", "element_selector", "=", "identity", ",", "result_selector", "=", "lambda", "key", ",", "grouping", ":", "grouping", ")", ":", "if", "self", ".", "closed", "(", ")", ":", "raise", "ValueError", "(", "\"Attempt to call group_by() on a closed \"", "\"Queryable.\"", ")", "if", "not", "is_callable", "(", "key_selector", ")", ":", "raise", "TypeError", "(", "\"group_by() parameter key_selector={0} is not \"", "\"callable\"", ".", "format", "(", "repr", "(", "key_selector", ")", ")", ")", "if", "not", "is_callable", "(", "element_selector", ")", ":", "raise", "TypeError", "(", "\"group_by() parameter element_selector={0} is not \"", "\"callable\"", ".", "format", "(", "repr", "(", "element_selector", ")", ")", ")", "if", "not", "is_callable", "(", "result_selector", ")", ":", "raise", "TypeError", "(", "\"group_by() parameter result_selector={0} is not \"", "\"callable\"", ".", "format", "(", "repr", "(", "result_selector", ")", ")", ")", "return", "self", ".", "_create", "(", "self", ".", "_generate_group_by_result", "(", "key_selector", ",", "element_selector", ",", "result_selector", ")", ")" ]
Flatten a coverage file into a CSV of source_context testname
def csv ( ctx , dst ) : sm = Smother . load ( ctx . obj [ 'report' ] ) semantic = ctx . obj [ 'semantic' ] writer = _csv . writer ( dst , lineterminator = '\n' ) dst . write ( "source_context, test_context\n" ) writer . writerows ( sm . iter_records ( semantic = semantic ) )
7,428
https://github.com/ChrisBeaumont/smother/blob/65d1ea6ae0060d213b0dcbb983c5aa8e7fee07bb/smother/cli.py#L100-L109
[ "def", "get_entity_info", "(", "pdb_id", ")", ":", "out", "=", "get_info", "(", "pdb_id", ",", "url_root", "=", "'http://www.rcsb.org/pdb/rest/getEntityInfo?structureId='", ")", "out", "=", "to_dict", "(", "out", ")", "return", "remove_at_sign", "(", "out", "[", "'entityInfo'", "]", "[", "'PDB'", "]", ")" ]
Erase the existing smother report .
def erase ( ctx ) : if os . path . exists ( ctx . obj [ 'report' ] ) : os . remove ( ctx . obj [ 'report' ] )
7,429
https://github.com/ChrisBeaumont/smother/blob/65d1ea6ae0060d213b0dcbb983c5aa8e7fee07bb/smother/cli.py#L114-L119
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Produce a . coverage file from a smother file
def to_coverage ( ctx ) : sm = Smother . load ( ctx . obj [ 'report' ] ) sm . coverage = coverage . coverage ( ) sm . write_coverage ( )
7,430
https://github.com/ChrisBeaumont/smother/blob/65d1ea6ae0060d213b0dcbb983c5aa8e7fee07bb/smother/cli.py#L124-L130
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
This method fills with 0 s missing fields
def fill_missing_fields ( self , data , columns ) : for column in columns : if column not in data . columns : data [ column ] = scipy . zeros ( len ( data ) ) return data
7,431
https://github.com/chaoss/grimoirelab-cereslib/blob/5110e6ca490a4f24bec3124286ebf51fd4e08bdd/cereslib/dfutils/format.py#L43-L59
[ "def", "cache_url_config", "(", "cls", ",", "url", ",", "backend", "=", "None", ")", ":", "url", "=", "urlparse", "(", "url", ")", "if", "not", "isinstance", "(", "url", ",", "cls", ".", "URL_CLASS", ")", "else", "url", "location", "=", "url", ".", "netloc", ".", "split", "(", "','", ")", "if", "len", "(", "location", ")", "==", "1", ":", "location", "=", "location", "[", "0", "]", "config", "=", "{", "'BACKEND'", ":", "cls", ".", "CACHE_SCHEMES", "[", "url", ".", "scheme", "]", ",", "'LOCATION'", ":", "location", ",", "}", "# Add the drive to LOCATION", "if", "url", ".", "scheme", "==", "'filecache'", ":", "config", ".", "update", "(", "{", "'LOCATION'", ":", "url", ".", "netloc", "+", "url", ".", "path", ",", "}", ")", "if", "url", ".", "path", "and", "url", ".", "scheme", "in", "[", "'memcache'", ",", "'pymemcache'", "]", ":", "config", ".", "update", "(", "{", "'LOCATION'", ":", "'unix:'", "+", "url", ".", "path", ",", "}", ")", "elif", "url", ".", "scheme", ".", "startswith", "(", "'redis'", ")", ":", "if", "url", ".", "hostname", ":", "scheme", "=", "url", ".", "scheme", ".", "replace", "(", "'cache'", ",", "''", ")", "else", ":", "scheme", "=", "'unix'", "locations", "=", "[", "scheme", "+", "'://'", "+", "loc", "+", "url", ".", "path", "for", "loc", "in", "url", ".", "netloc", ".", "split", "(", "','", ")", "]", "config", "[", "'LOCATION'", "]", "=", "locations", "[", "0", "]", "if", "len", "(", "locations", ")", "==", "1", "else", "locations", "if", "url", ".", "query", ":", "config_options", "=", "{", "}", "for", "k", ",", "v", "in", "parse_qs", "(", "url", ".", "query", ")", ".", "items", "(", ")", ":", "opt", "=", "{", "k", ".", "upper", "(", ")", ":", "_cast", "(", "v", "[", "0", "]", ")", "}", "if", "k", ".", "upper", "(", ")", "in", "cls", ".", "_CACHE_BASE_OPTIONS", ":", "config", ".", "update", "(", "opt", ")", "else", ":", "config_options", ".", "update", "(", "opt", ")", "config", "[", "'OPTIONS'", "]", "=", "config_options", "if", "backend", 
":", "config", "[", "'BACKEND'", "]", "=", "backend", "return", "config" ]
This method updates the names of the fields according to matching
def update_field_names ( self , data , matching ) : for key in matching . keys ( ) : if key in data . columns : data . rename ( columns = { key : matching [ key ] } ) return data
7,432
https://github.com/chaoss/grimoirelab-cereslib/blob/5110e6ca490a4f24bec3124286ebf51fd4e08bdd/cereslib/dfutils/format.py#L61-L77
[ "def", "is_running", "(", "self", ")", ":", "pp", "=", "self", ".", "pid", "if", "pp", ":", "try", ":", "proc", "=", "psutil", ".", "Process", "(", "pp", ")", "# Possible status:", "# \"STATUS_RUNNING\", \"STATUS_IDLE\",", "# \"STATUS_SLEEPING\", \"STATUS_DISK_SLEEP\",", "# \"STATUS_STOPPED\", \"STATUS_TRACING_STOP\",", "# \"STATUS_ZOMBIE\", \"STATUS_DEAD\",", "# \"STATUS_WAKING\", \"STATUS_LOCKED\",", "if", "proc", ".", "status", "in", "(", "psutil", ".", "STATUS_STOPPED", ",", "psutil", ".", "STATUS_DEAD", ",", "psutil", ".", "STATUS_ZOMBIE", ")", ":", "# The PID is still in the process table so call stop to", "# remove the PID.", "self", ".", "stop", "(", ")", "return", "False", "else", ":", "# OK, it's running.", "return", "True", "except", "psutil", ".", "NoSuchProcess", ":", "pass", "return", "False" ]
This method translates columns values into datetime objects
def format_dates ( self , data , columns ) : for column in columns : if column in data . columns : data [ column ] = pandas . to_datetime ( data [ column ] ) return data
7,433
https://github.com/chaoss/grimoirelab-cereslib/blob/5110e6ca490a4f24bec3124286ebf51fd4e08bdd/cereslib/dfutils/format.py#L80-L96
[ "def", "rmse", "(", "params1", ",", "params2", ")", ":", "assert", "len", "(", "params1", ")", "==", "len", "(", "params2", ")", "params1", "=", "np", ".", "asarray", "(", "params1", ")", "-", "np", ".", "mean", "(", "params1", ")", "params2", "=", "np", ".", "asarray", "(", "params2", ")", "-", "np", ".", "mean", "(", "params2", ")", "sqrt_n", "=", "math", ".", "sqrt", "(", "len", "(", "params1", ")", ")", "return", "np", ".", "linalg", ".", "norm", "(", "params1", "-", "params2", ",", "ord", "=", "2", ")", "/", "sqrt_n" ]
This method removes columns in data
def remove_columns ( self , data , columns ) : for column in columns : if column in data . columns : data = data . drop ( column , axis = 1 ) return data
7,434
https://github.com/chaoss/grimoirelab-cereslib/blob/5110e6ca490a4f24bec3124286ebf51fd4e08bdd/cereslib/dfutils/format.py#L98-L114
[ "def", "beacon", "(", "config", ")", ":", "parts", "=", "psutil", ".", "disk_partitions", "(", "all", "=", "True", ")", "ret", "=", "[", "]", "for", "mounts", "in", "config", ":", "mount", "=", "next", "(", "iter", "(", "mounts", ")", ")", "# Because we're using regular expressions", "# if our mount doesn't end with a $, insert one.", "mount_re", "=", "mount", "if", "not", "mount", ".", "endswith", "(", "'$'", ")", ":", "mount_re", "=", "'{0}$'", ".", "format", "(", "mount", ")", "if", "salt", ".", "utils", ".", "platform", ".", "is_windows", "(", ")", ":", "# mount_re comes in formatted with a $ at the end", "# can be `C:\\\\$` or `C:\\\\\\\\$`", "# re string must be like `C:\\\\\\\\` regardless of \\\\ or \\\\\\\\", "# also, psutil returns uppercase", "mount_re", "=", "re", ".", "sub", "(", "r':\\\\\\$'", ",", "r':\\\\\\\\'", ",", "mount_re", ")", "mount_re", "=", "re", ".", "sub", "(", "r':\\\\\\\\\\$'", ",", "r':\\\\\\\\'", ",", "mount_re", ")", "mount_re", "=", "mount_re", ".", "upper", "(", ")", "for", "part", "in", "parts", ":", "if", "re", ".", "match", "(", "mount_re", ",", "part", ".", "mountpoint", ")", ":", "_mount", "=", "part", ".", "mountpoint", "try", ":", "_current_usage", "=", "psutil", ".", "disk_usage", "(", "_mount", ")", "except", "OSError", ":", "log", ".", "warning", "(", "'%s is not a valid mount point.'", ",", "_mount", ")", "continue", "current_usage", "=", "_current_usage", ".", "percent", "monitor_usage", "=", "mounts", "[", "mount", "]", "if", "'%'", "in", "monitor_usage", ":", "monitor_usage", "=", "re", ".", "sub", "(", "'%'", ",", "''", ",", "monitor_usage", ")", "monitor_usage", "=", "float", "(", "monitor_usage", ")", "if", "current_usage", ">=", "monitor_usage", ":", "ret", ".", "append", "(", "{", "'diskusage'", ":", "current_usage", ",", "'mount'", ":", "_mount", "}", ")", "return", "ret" ]
calculate Gruneisen parameter for the Tange equation
def tange_grun ( v , v0 , gamma0 , a , b ) : x = v / v0 return gamma0 * ( 1. + a * ( np . power ( x , b ) - 1. ) )
7,435
https://github.com/SHDShim/pytheos/blob/be079624405e92fbec60c5ead253eb5917e55237/pytheos/eqn_therm_Tange.py#L13-L25
[ "def", "get_upload_path", "(", "instance", ",", "filename", ")", ":", "if", "not", "instance", ".", "name", ":", "instance", ".", "name", "=", "filename", "# set original filename", "date", "=", "timezone", ".", "now", "(", ")", ".", "date", "(", ")", "filename", "=", "'{name}.{ext}'", ".", "format", "(", "name", "=", "uuid4", "(", ")", ".", "hex", ",", "ext", "=", "filename", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", ")", "return", "os", ".", "path", ".", "join", "(", "'post_office_attachments'", ",", "str", "(", "date", ".", "year", ")", ",", "str", "(", "date", ".", "month", ")", ",", "str", "(", "date", ".", "day", ")", ",", "filename", ")" ]
calculate Debye temperature for the Tange equation
def tange_debyetemp ( v , v0 , gamma0 , a , b , theta0 ) : x = v / v0 gamma = tange_grun ( v , v0 , gamma0 , a , b ) if isuncertainties ( [ v , v0 , gamma0 , a , b , theta0 ] ) : theta = theta0 * np . power ( x , ( - 1. * ( 1. - a ) * gamma0 ) ) * unp . exp ( ( gamma0 - gamma ) / b ) else : theta = theta0 * np . power ( x , ( - 1. * ( 1. - a ) * gamma0 ) ) * np . exp ( ( gamma0 - gamma ) / b ) return theta
7,436
https://github.com/SHDShim/pytheos/blob/be079624405e92fbec60c5ead253eb5917e55237/pytheos/eqn_therm_Tange.py#L28-L48
[ "def", "format_modified", "(", "self", ",", "modified", ",", "sep", "=", "\" \"", ")", ":", "if", "modified", "is", "not", "None", ":", "return", "modified", ".", "strftime", "(", "\"%Y-%m-%d{0}%H:%M:%S.%fZ\"", ".", "format", "(", "sep", ")", ")", "return", "u\"\"" ]
calculate thermal pressure for the Tange equation
def tange_pth ( v , temp , v0 , gamma0 , a , b , theta0 , n , z , t_ref = 300. , three_r = 3. * constants . R ) : v_mol = vol_uc2mol ( v , z ) gamma = tange_grun ( v , v0 , gamma0 , a , b ) theta = tange_debyetemp ( v , v0 , gamma0 , a , b , theta0 ) xx = theta / temp debye = debye_E ( xx ) if t_ref == 0. : debye0 = 0. else : xx0 = theta / t_ref debye0 = debye_E ( xx0 ) Eth0 = three_r * n * t_ref * debye0 Eth = three_r * n * temp * debye delEth = Eth - Eth0 p_th = ( gamma / v_mol * delEth ) * 1.e-9 return p_th
7,437
https://github.com/SHDShim/pytheos/blob/be079624405e92fbec60c5ead253eb5917e55237/pytheos/eqn_therm_Tange.py#L51-L83
[ "def", "registration_backend", "(", "backend", "=", "None", ",", "namespace", "=", "None", ")", ":", "# type: (Optional[Text], Optional[Text]) -> BaseBackend", "backend", "=", "backend", "or", "ORGS_REGISTRATION_BACKEND", "class_module", ",", "class_name", "=", "backend", ".", "rsplit", "(", "\".\"", ",", "1", ")", "mod", "=", "import_module", "(", "class_module", ")", "return", "getattr", "(", "mod", ",", "class_name", ")", "(", "namespace", "=", "namespace", ")" ]
Little helper used by CapitainsCtsPassage here to comply with parents args
def _make_passage_kwargs ( urn , reference ) : kwargs = { } if urn is not None : if reference is not None : kwargs [ "urn" ] = URN ( "{}:{}" . format ( urn . upTo ( URN . VERSION ) , reference ) ) else : kwargs [ "urn" ] = urn return kwargs
7,438
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/local/capitains/cts.py#L33-L46
[ "def", "sync_readmes", "(", ")", ":", "print", "(", "\"syncing README\"", ")", "with", "open", "(", "\"README.md\"", ",", "'r'", ")", "as", "reader", ":", "file_text", "=", "reader", ".", "read", "(", ")", "with", "open", "(", "\"README\"", ",", "'w'", ")", "as", "writer", ":", "writer", ".", "write", "(", "file_text", ")" ]
Finds a passage in the current text
def getTextualNode ( self , subreference = None , simple = False ) : if subreference is None : return self . _getSimplePassage ( ) if not isinstance ( subreference , CtsReference ) : if isinstance ( subreference , str ) : subreference = CtsReference ( subreference ) elif isinstance ( subreference , list ) : subreference = CtsReference ( "." . join ( subreference ) ) if len ( subreference . start ) > self . citation . root . depth : raise CitationDepthError ( "URN is deeper than citation scheme" ) if simple is True : return self . _getSimplePassage ( subreference ) if not subreference . is_range ( ) : start = end = subreference . start . list else : start , end = subreference . start . list , subreference . end . list citation_start = self . citation . root [ len ( start ) - 1 ] citation_end = self . citation . root [ len ( end ) - 1 ] start , end = citation_start . fill ( passage = start ) , citation_end . fill ( passage = end ) start , end = normalizeXpath ( start . split ( "/" ) [ 2 : ] ) , normalizeXpath ( end . split ( "/" ) [ 2 : ] ) xml = self . textObject . xml if isinstance ( xml , etree . _Element ) : root = copyNode ( xml ) else : root = copyNode ( xml . getroot ( ) ) root = passageLoop ( xml , root , start , end ) if self . urn : urn = URN ( "{}:{}" . format ( self . urn , subreference ) ) else : urn = None return CapitainsCtsPassage ( urn = urn , resource = root , text = self , citation = citation_start , reference = subreference )
7,439
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/local/capitains/cts.py#L53-L109
[ "def", "_get_broadcast_shape", "(", "shape1", ",", "shape2", ")", ":", "if", "shape1", "==", "shape2", ":", "return", "shape1", "length1", "=", "len", "(", "shape1", ")", "length2", "=", "len", "(", "shape2", ")", "if", "length1", ">", "length2", ":", "shape", "=", "list", "(", "shape1", ")", "else", ":", "shape", "=", "list", "(", "shape2", ")", "i", "=", "max", "(", "length1", ",", "length2", ")", "-", "1", "for", "a", ",", "b", "in", "zip", "(", "shape1", "[", ":", ":", "-", "1", "]", ",", "shape2", "[", ":", ":", "-", "1", "]", ")", ":", "if", "a", "!=", "1", "and", "b", "!=", "1", "and", "a", "!=", "b", ":", "raise", "ValueError", "(", "'shape1=%s is not broadcastable to shape2=%s'", "%", "(", "shape1", ",", "shape2", ")", ")", "shape", "[", "i", "]", "=", "max", "(", "a", ",", "b", ")", "i", "-=", "1", "return", "tuple", "(", "shape", ")" ]
Retrieve a single node representing the passage .
def _getSimplePassage ( self , reference = None ) : if reference is None : return _SimplePassage ( resource = self . resource , reference = None , urn = self . urn , citation = self . citation . root , text = self ) subcitation = self . citation . root [ reference . depth - 1 ] resource = self . resource . xpath ( subcitation . fill ( reference ) , namespaces = XPATH_NAMESPACES ) if len ( resource ) != 1 : raise InvalidURN return _SimplePassage ( resource [ 0 ] , reference = reference , urn = self . urn , citation = subcitation , text = self . textObject )
7,440
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/local/capitains/cts.py#L111-L145
[ "def", "_set_virtual", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "in", "self", "and", "key", "not", "in", "self", ".", "_virtual_keys", ":", "return", "# Do nothing for non-virtual keys.", "self", ".", "_virtual_keys", ".", "add", "(", "key", ")", "if", "key", "in", "self", "and", "self", "[", "key", "]", "is", "not", "value", ":", "self", ".", "_on_change", "(", "key", ",", "value", ")", "dict", ".", "__setitem__", "(", "self", ",", "key", ",", "value", ")", "for", "overlay", "in", "self", ".", "_iter_overlays", "(", ")", ":", "overlay", ".", "_set_virtual", "(", "key", ",", "value", ")" ]
CtsReference available at a given level
def getReffs ( self , level : int = 1 , subreference : CtsReference = None ) -> CtsReferenceSet : if not subreference and hasattr ( self , "reference" ) : subreference = self . reference elif subreference and not isinstance ( subreference , CtsReference ) : subreference = CtsReference ( subreference ) return self . getValidReff ( level = level , reference = subreference )
7,441
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/local/capitains/cts.py#L159-L172
[ "def", "convert_to_experiment_list", "(", "experiments", ")", ":", "exp_list", "=", "experiments", "# Transform list if necessary", "if", "experiments", "is", "None", ":", "exp_list", "=", "[", "]", "elif", "isinstance", "(", "experiments", ",", "Experiment", ")", ":", "exp_list", "=", "[", "experiments", "]", "elif", "type", "(", "experiments", ")", "is", "dict", ":", "exp_list", "=", "[", "Experiment", ".", "from_json", "(", "name", ",", "spec", ")", "for", "name", ",", "spec", "in", "experiments", ".", "items", "(", ")", "]", "# Validate exp_list", "if", "(", "type", "(", "exp_list", ")", "is", "list", "and", "all", "(", "isinstance", "(", "exp", ",", "Experiment", ")", "for", "exp", "in", "exp_list", ")", ")", ":", "if", "len", "(", "exp_list", ")", ">", "1", ":", "logger", ".", "warning", "(", "\"All experiments will be \"", "\"using the same SearchAlgorithm.\"", ")", "else", ":", "raise", "TuneError", "(", "\"Invalid argument: {}\"", ".", "format", "(", "experiments", ")", ")", "return", "exp_list" ]
Perform XPath on the passage XML
def xpath ( self , * args , * * kwargs ) : if "smart_strings" not in kwargs : kwargs [ "smart_strings" ] = False return self . resource . xpath ( * args , * * kwargs )
7,442
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/local/capitains/cts.py#L285-L295
[ "def", "_generate_noise_system", "(", "dimensions_tr", ",", "spatial_sd", ",", "temporal_sd", ",", "spatial_noise_type", "=", "'gaussian'", ",", "temporal_noise_type", "=", "'gaussian'", ",", ")", ":", "def", "noise_volume", "(", "dimensions", ",", "noise_type", ",", ")", ":", "if", "noise_type", "==", "'rician'", ":", "# Generate the Rician noise (has an SD of 1)", "noise", "=", "stats", ".", "rice", ".", "rvs", "(", "b", "=", "0", ",", "loc", "=", "0", ",", "scale", "=", "1.527", ",", "size", "=", "dimensions", ")", "elif", "noise_type", "==", "'exponential'", ":", "# Make an exponential distribution (has an SD of 1)", "noise", "=", "stats", ".", "expon", ".", "rvs", "(", "0", ",", "scale", "=", "1", ",", "size", "=", "dimensions", ")", "elif", "noise_type", "==", "'gaussian'", ":", "noise", "=", "np", ".", "random", ".", "randn", "(", "np", ".", "prod", "(", "dimensions", ")", ")", ".", "reshape", "(", "dimensions", ")", "# Return the noise", "return", "noise", "# Get just the xyz coordinates", "dimensions", "=", "np", ".", "asarray", "(", "[", "dimensions_tr", "[", "0", "]", ",", "dimensions_tr", "[", "1", "]", ",", "dimensions_tr", "[", "2", "]", ",", "1", "]", ")", "# Generate noise", "spatial_noise", "=", "noise_volume", "(", "dimensions", ",", "spatial_noise_type", ")", "temporal_noise", "=", "noise_volume", "(", "dimensions_tr", ",", "temporal_noise_type", ")", "# Make the system noise have a specific spatial variability", "spatial_noise", "*=", "spatial_sd", "# Set the size of the noise", "temporal_noise", "*=", "temporal_sd", "# The mean in time of system noise needs to be zero, so subtract the", "# means of the temporal noise in time", "temporal_noise_mean", "=", "np", ".", "mean", "(", "temporal_noise", ",", "3", ")", ".", "reshape", "(", "dimensions", "[", "0", "]", ",", "dimensions", "[", "1", "]", ",", "dimensions", "[", "2", "]", ",", "1", ")", "temporal_noise", "=", "temporal_noise", "-", "temporal_noise_mean", "# Save the combination", 
"system_noise", "=", "spatial_noise", "+", "temporal_noise", "return", "system_noise" ]
Transform the CapitainsCtsPassage in XML string
def tostring ( self , * args , * * kwargs ) : return etree . tostring ( self . resource , * args , * * kwargs )
7,443
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/local/capitains/cts.py#L297-L304
[ "def", "variable_state", "(", "cls", ",", "scripts", ",", "variables", ")", ":", "def", "conditionally_set_not_modified", "(", ")", ":", "\"\"\"Set the variable to modified if it hasn't been altered.\"\"\"", "state", "=", "variables", ".", "get", "(", "block", ".", "args", "[", "0", "]", ",", "None", ")", "if", "state", "==", "cls", ".", "STATE_NOT_MODIFIED", ":", "variables", "[", "block", ".", "args", "[", "0", "]", "]", "=", "cls", ".", "STATE_MODIFIED", "green_flag", ",", "other", "=", "partition_scripts", "(", "scripts", ",", "cls", ".", "HAT_GREEN_FLAG", ")", "variables", "=", "dict", "(", "(", "x", ",", "cls", ".", "STATE_NOT_MODIFIED", ")", "for", "x", "in", "variables", ")", "for", "script", "in", "green_flag", ":", "in_zone", "=", "True", "for", "name", ",", "level", ",", "block", "in", "cls", ".", "iter_blocks", "(", "script", ".", "blocks", ")", ":", "if", "name", "==", "'broadcast %s and wait'", ":", "in_zone", "=", "False", "if", "name", "==", "'set %s effect to %s'", ":", "state", "=", "variables", ".", "get", "(", "block", ".", "args", "[", "0", "]", ",", "None", ")", "if", "state", "is", "None", ":", "continue", "# Not a variable we care about", "if", "in_zone", "and", "level", "==", "0", ":", "# Success!", "if", "state", "==", "cls", ".", "STATE_NOT_MODIFIED", ":", "state", "=", "cls", ".", "STATE_INITIALIZED", "else", ":", "# Multiple when green flag clicked conflict", "# TODO: Need to allow multiple sets of a variable", "# within the same script", "# print 'CONFLICT', script", "state", "=", "cls", ".", "STATE_MODIFIED", "elif", "in_zone", ":", "continue", "# Conservative ignore for nested absolutes", "elif", "state", "==", "cls", ".", "STATE_NOT_MODIFIED", ":", "state", "=", "cls", ".", "STATE_MODIFIED", "variables", "[", "block", ".", "args", "[", "0", "]", "]", "=", "state", "elif", "name", "==", "'change %s effect by %s'", ":", "conditionally_set_not_modified", "(", ")", "for", "script", "in", "other", ":", "for", "name", ",", "_", ",", 
"block", "in", "cls", ".", "iter_blocks", "(", "script", ".", "blocks", ")", ":", "if", "name", "in", "(", "'change %s effect by %s'", ",", "'set %s effect to %s'", ")", ":", "conditionally_set_not_modified", "(", ")", "return", "variables" ]
Children of the passage
def childIds ( self ) : if self . depth >= len ( self . citation . root ) : return [ ] elif self . _children is not None : return self . _children else : self . _children = self . getReffs ( ) return self . _children
7,444
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/local/capitains/cts.py#L349-L361
[ "def", "_set_virtual", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "in", "self", "and", "key", "not", "in", "self", ".", "_virtual_keys", ":", "return", "# Do nothing for non-virtual keys.", "self", ".", "_virtual_keys", ".", "add", "(", "key", ")", "if", "key", "in", "self", "and", "self", "[", "key", "]", "is", "not", "value", ":", "self", ".", "_on_change", "(", "key", ",", "value", ")", "dict", ".", "__setitem__", "(", "self", ",", "key", ",", "value", ")", "for", "overlay", "in", "self", ".", "_iter_overlays", "(", ")", ":", "overlay", ".", "_set_virtual", "(", "key", ",", "value", ")" ]
Returns a formatted string representing the coordinate . The format depends on the coordinate type .
def location ( hexgrid_type , coord ) : if hexgrid_type == TILE : return str ( coord ) elif hexgrid_type == NODE : tile_id = nearest_tile_to_node ( coord ) dirn = tile_node_offset_to_direction ( coord - tile_id_to_coord ( tile_id ) ) return '({} {})' . format ( tile_id , dirn ) elif hexgrid_type == EDGE : tile_id = nearest_tile_to_edge ( coord ) dirn = tile_edge_offset_to_direction ( coord - tile_id_to_coord ( tile_id ) ) return '({} {})' . format ( tile_id , dirn ) else : logging . warning ( 'unsupported hexgrid_type={}' . format ( hexgrid_type ) ) return None
7,445
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L72-L97
[ "def", "thaw_args", "(", "subparsers", ")", ":", "thaw_parser", "=", "subparsers", ".", "add_parser", "(", "'thaw'", ")", "thaw_parser", ".", "add_argument", "(", "'--gpg-password-path'", ",", "dest", "=", "'gpg_pass_path'", ",", "help", "=", "'Vault path of GPG passphrase location'", ")", "thaw_parser", ".", "add_argument", "(", "'--ignore-missing'", ",", "dest", "=", "'ignore_missing'", ",", "help", "=", "'Warn when secrets are missing from icefiles'", "'instead of exiting'", ",", "action", "=", "'store_true'", ",", "default", "=", "False", ")", "secretfile_args", "(", "thaw_parser", ")", "archive_args", "(", "thaw_parser", ")", "vars_args", "(", "thaw_parser", ")", "base_args", "(", "thaw_parser", ")" ]
Returns a list of coastal edge coordinate .
def coastal_edges ( tile_id ) : edges = list ( ) tile_coord = tile_id_to_coord ( tile_id ) for edge_coord in edges_touching_tile ( tile_id ) : dirn = tile_edge_offset_to_direction ( edge_coord - tile_coord ) if tile_id_in_direction ( tile_id , dirn ) is None : edges . append ( edge_coord ) return edges
7,446
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L147-L160
[ "def", "getpart", "(", "self", ",", "ix", ")", ":", "if", "self", ".", "offsets", "[", "ix", "]", "==", "0", ":", "return", "comp", ",", "ofs", ",", "size", ",", "checksum", "=", "self", ".", "getsectioninfo", "(", "ix", ")", "fh", "=", "FileSection", "(", "self", ".", "fh", ",", "ofs", ",", "ofs", "+", "size", ")", "if", "comp", "==", "2", ":", "import", "zlib", "# very old databases used a different compression scheme:\r", "wbits", "=", "-", "15", "if", "self", ".", "magic", "==", "'IDA0'", "else", "15", "fh", "=", "makeStringIO", "(", "zlib", ".", "decompress", "(", "fh", ".", "read", "(", "size", ")", ",", "wbits", ")", ")", "elif", "comp", "==", "0", ":", "pass", "else", ":", "raise", "Exception", "(", "\"unsupported section encoding: %02x\"", "%", "comp", ")", "return", "fh" ]
Variant on direction_to_tile . Returns None if there s no tile there .
def tile_id_in_direction ( from_tile_id , direction ) : coord_from = tile_id_to_coord ( from_tile_id ) for offset , dirn in _tile_tile_offsets . items ( ) : if dirn == direction : coord_to = coord_from + offset if coord_to in legal_tile_coords ( ) : return tile_id_from_coord ( coord_to ) return None
7,447
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L163-L177
[ "def", "truncate_schema", "(", "self", ")", ":", "assert", "self", ".", "server", "==", "'localhost'", "con", "=", "self", ".", "connection", "or", "self", ".", "_connect", "(", ")", "self", ".", "_initialize", "(", "con", ")", "cur", "=", "con", ".", "cursor", "(", ")", "cur", ".", "execute", "(", "'DELETE FROM publication;'", ")", "cur", ".", "execute", "(", "'TRUNCATE systems CASCADE;'", ")", "con", ".", "commit", "(", ")", "con", ".", "close", "(", ")", "return" ]
Convenience method wrapping tile_tile_offset_to_direction . Used to get the direction of the offset between two tiles . The tiles must be adjacent .
def direction_to_tile ( from_tile_id , to_tile_id ) : coord_from = tile_id_to_coord ( from_tile_id ) coord_to = tile_id_to_coord ( to_tile_id ) direction = tile_tile_offset_to_direction ( coord_to - coord_from ) # logging.debug('Tile direction: {}->{} is {}'.format( # from_tile.tile_id, # to_tile.tile_id, # direction # )) return direction
7,448
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L180-L197
[ "def", "catalogFactory", "(", "name", ",", "*", "*", "kwargs", ")", ":", "fn", "=", "lambda", "member", ":", "inspect", ".", "isclass", "(", "member", ")", "and", "member", ".", "__module__", "==", "__name__", "catalogs", "=", "odict", "(", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "fn", ")", ")", "if", "name", "not", "in", "list", "(", "catalogs", ".", "keys", "(", ")", ")", ":", "msg", "=", "\"%s not found in catalogs:\\n %s\"", "%", "(", "name", ",", "list", "(", "kernels", ".", "keys", "(", ")", ")", ")", "logger", ".", "error", "(", "msg", ")", "msg", "=", "\"Unrecognized catalog: %s\"", "%", "name", "raise", "Exception", "(", "msg", ")", "return", "catalogs", "[", "name", "]", "(", "*", "*", "kwargs", ")" ]
Returns the edge coordinate in the given direction at the given tile identifier .
def edge_coord_in_direction ( tile_id , direction ) : tile_coord = tile_id_to_coord ( tile_id ) for edge_coord in edges_touching_tile ( tile_id ) : if tile_edge_offset_to_direction ( edge_coord - tile_coord ) == direction : return edge_coord raise ValueError ( 'No edge found in direction={} at tile_id={}' . format ( direction , tile_id ) )
7,449
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L242-L257
[ "def", "truncate_schema", "(", "self", ")", ":", "assert", "self", ".", "server", "==", "'localhost'", "con", "=", "self", ".", "connection", "or", "self", ".", "_connect", "(", ")", "self", ".", "_initialize", "(", "con", ")", "cur", "=", "con", ".", "cursor", "(", ")", "cur", ".", "execute", "(", "'DELETE FROM publication;'", ")", "cur", ".", "execute", "(", "'TRUNCATE systems CASCADE;'", ")", "con", ".", "commit", "(", ")", "con", ".", "close", "(", ")", "return" ]
Returns the node coordinate in the given direction at the given tile identifier .
def node_coord_in_direction ( tile_id , direction ) : tile_coord = tile_id_to_coord ( tile_id ) for node_coord in nodes_touching_tile ( tile_id ) : if tile_node_offset_to_direction ( node_coord - tile_coord ) == direction : return node_coord raise ValueError ( 'No node found in direction={} at tile_id={}' . format ( direction , tile_id ) )
7,450
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L260-L275
[ "def", "_setup_conn_old", "(", "*", "*", "kwargs", ")", ":", "host", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.api_url'", ",", "'http://localhost:8080'", ")", "username", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.user'", ")", "password", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.password'", ")", "ca_cert", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.certificate-authority-data'", ")", "client_cert", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-certificate-data'", ")", "client_key", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-key-data'", ")", "ca_cert_file", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.certificate-authority-file'", ")", "client_cert_file", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-certificate-file'", ")", "client_key_file", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-key-file'", ")", "# Override default API settings when settings are provided", "if", "'api_url'", "in", "kwargs", ":", "host", "=", "kwargs", ".", "get", "(", "'api_url'", ")", "if", "'api_user'", "in", "kwargs", ":", "username", "=", "kwargs", ".", "get", "(", "'api_user'", ")", "if", "'api_password'", "in", "kwargs", ":", "password", "=", "kwargs", ".", "get", "(", "'api_password'", ")", "if", "'api_certificate_authority_file'", "in", "kwargs", ":", "ca_cert_file", "=", "kwargs", ".", "get", "(", "'api_certificate_authority_file'", ")", "if", "'api_client_certificate_file'", "in", "kwargs", ":", "client_cert_file", "=", "kwargs", ".", "get", "(", "'api_client_certificate_file'", ")", "if", "'api_client_key_file'", "in", "kwargs", ":", "client_key_file", "=", "kwargs", ".", "get", "(", "'api_client_key_file'", ")", "if", "(", "kubernetes", ".", "client", ".", "configuration", ".", "host", "!=", "host", "or", "kubernetes", ".", "client", ".", "configuration", ".", "user", 
"!=", "username", "or", "kubernetes", ".", "client", ".", "configuration", ".", "password", "!=", "password", ")", ":", "# Recreates API connection if settings are changed", "kubernetes", ".", "client", ".", "configuration", ".", "__init__", "(", ")", "kubernetes", ".", "client", ".", "configuration", ".", "host", "=", "host", "kubernetes", ".", "client", ".", "configuration", ".", "user", "=", "username", "kubernetes", ".", "client", ".", "configuration", ".", "passwd", "=", "password", "if", "ca_cert_file", ":", "kubernetes", ".", "client", ".", "configuration", ".", "ssl_ca_cert", "=", "ca_cert_file", "elif", "ca_cert", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "prefix", "=", "'salt-kube-'", ",", "delete", "=", "False", ")", "as", "ca", ":", "ca", ".", "write", "(", "base64", ".", "b64decode", "(", "ca_cert", ")", ")", "kubernetes", ".", "client", ".", "configuration", ".", "ssl_ca_cert", "=", "ca", ".", "name", "else", ":", "kubernetes", ".", "client", ".", "configuration", ".", "ssl_ca_cert", "=", "None", "if", "client_cert_file", ":", "kubernetes", ".", "client", ".", "configuration", ".", "cert_file", "=", "client_cert_file", "elif", "client_cert", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "prefix", "=", "'salt-kube-'", ",", "delete", "=", "False", ")", "as", "c", ":", "c", ".", "write", "(", "base64", ".", "b64decode", "(", "client_cert", ")", ")", "kubernetes", ".", "client", ".", "configuration", ".", "cert_file", "=", "c", ".", "name", "else", ":", "kubernetes", ".", "client", ".", "configuration", ".", "cert_file", "=", "None", "if", "client_key_file", ":", "kubernetes", ".", "client", ".", "configuration", ".", "key_file", "=", "client_key_file", "elif", "client_key", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "prefix", "=", "'salt-kube-'", ",", "delete", "=", "False", ")", "as", "k", ":", "k", ".", "write", "(", "base64", ".", "b64decode", "(", "client_key", ")", ")", "kubernetes", ".", "client", ".", 
"configuration", ".", "key_file", "=", "k", ".", "name", "else", ":", "kubernetes", ".", "client", ".", "configuration", ".", "key_file", "=", "None", "return", "{", "}" ]
Convert a tile coordinate to its corresponding tile identifier .
def tile_id_from_coord ( coord ) : for i , c in _tile_id_to_coord . items ( ) : if c == coord : return i raise Exception ( 'Tile id lookup failed, coord={} not found in map' . format ( hex ( coord ) ) )
7,451
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L293-L303
[ "def", "clean_new_password2", "(", "self", ")", ":", "password1", "=", "self", ".", "cleaned_data", ".", "get", "(", "'new_password1'", ")", "password2", "=", "self", ".", "cleaned_data", ".", "get", "(", "'new_password2'", ")", "try", ":", "directory", "=", "APPLICATION", ".", "default_account_store_mapping", ".", "account_store", "directory", ".", "password_policy", ".", "strength", ".", "validate_password", "(", "password2", ")", "except", "ValueError", "as", "e", ":", "raise", "forms", ".", "ValidationError", "(", "str", "(", "e", ")", ")", "if", "password1", "and", "password2", ":", "if", "password1", "!=", "password2", ":", "raise", "forms", ".", "ValidationError", "(", "\"The two passwords didn't match.\"", ")", "return", "password2" ]
Get the first tile found adjacent to the given edge . Returns a tile identifier .
def nearest_tile_to_edge_using_tiles ( tile_ids , edge_coord ) : for tile_id in tile_ids : if edge_coord - tile_id_to_coord ( tile_id ) in _tile_edge_offsets . keys ( ) : return tile_id logging . critical ( 'Did not find a tile touching edge={}' . format ( edge_coord ) )
7,452
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L317-L328
[ "def", "merge_config", "(", "self", ",", "user_config", ")", ":", "# provisioanlly update the default configurations with the user preferences", "temp_data_config", "=", "copy", ".", "deepcopy", "(", "self", ".", "data_config", ")", ".", "update", "(", "user_config", ")", "temp_model_config", "=", "copy", ".", "deepcopy", "(", "self", ".", "model_config", ")", ".", "update", "(", "user_config", ")", "temp_conversation_config", "=", "copy", ".", "deepcopy", "(", "self", ".", "conversation_config", ")", ".", "update", "(", "user_config", ")", "# if the new configurations validate, apply them", "if", "validate_data_config", "(", "temp_data_config", ")", ":", "self", ".", "data_config", "=", "temp_data_config", "if", "validate_model_config", "(", "temp_model_config", ")", ":", "self", ".", "model_config", "=", "temp_model_config", "if", "validate_conversation_config", "(", "temp_conversation_config", ")", ":", "self", ".", "conversation_config", "=", "temp_conversation_config" ]
Get the first tile found adjacent to the given node . Returns a tile identifier .
def nearest_tile_to_node_using_tiles ( tile_ids , node_coord ) : for tile_id in tile_ids : if node_coord - tile_id_to_coord ( tile_id ) in _tile_node_offsets . keys ( ) : return tile_id logging . critical ( 'Did not find a tile touching node={}' . format ( node_coord ) )
7,453
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L342-L353
[ "def", "set_USRdict", "(", "self", ",", "USRdict", "=", "{", "}", ")", ":", "self", ".", "_check_inputs", "(", "USRdict", "=", "USRdict", ")", "self", ".", "_USRdict", "=", "USRdict" ]
Get a list of edge coordinates touching the given tile .
def edges_touching_tile ( tile_id ) : coord = tile_id_to_coord ( tile_id ) edges = [ ] for offset in _tile_edge_offsets . keys ( ) : edges . append ( coord + offset ) # logging.debug('tile_id={}, edges touching={}'.format(tile_id, edges)) return edges
7,454
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L356-L368
[ "def", "catalogFactory", "(", "name", ",", "*", "*", "kwargs", ")", ":", "fn", "=", "lambda", "member", ":", "inspect", ".", "isclass", "(", "member", ")", "and", "member", ".", "__module__", "==", "__name__", "catalogs", "=", "odict", "(", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "fn", ")", ")", "if", "name", "not", "in", "list", "(", "catalogs", ".", "keys", "(", ")", ")", ":", "msg", "=", "\"%s not found in catalogs:\\n %s\"", "%", "(", "name", ",", "list", "(", "kernels", ".", "keys", "(", ")", ")", ")", "logger", ".", "error", "(", "msg", ")", "msg", "=", "\"Unrecognized catalog: %s\"", "%", "name", "raise", "Exception", "(", "msg", ")", "return", "catalogs", "[", "name", "]", "(", "*", "*", "kwargs", ")" ]
Get a list of node coordinates touching the given tile .
def nodes_touching_tile ( tile_id ) : coord = tile_id_to_coord ( tile_id ) nodes = [ ] for offset in _tile_node_offsets . keys ( ) : nodes . append ( coord + offset ) # logging.debug('tile_id={}, nodes touching={}'.format(tile_id, nodes)) return nodes
7,455
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L371-L383
[ "def", "__create_price_for", "(", "self", ",", "commodity", ":", "Commodity", ",", "price", ":", "PriceModel", ")", ":", "logging", ".", "info", "(", "\"Adding a new price for %s, %s, %s\"", ",", "commodity", ".", "mnemonic", ",", "price", ".", "datetime", ".", "strftime", "(", "\"%Y-%m-%d\"", ")", ",", "price", ".", "value", ")", "# safety check. Compare currencies.", "sec_svc", "=", "SecurityAggregate", "(", "self", ".", "book", ",", "commodity", ")", "currency", "=", "sec_svc", ".", "get_currency", "(", ")", "if", "currency", "!=", "price", ".", "currency", ":", "raise", "ValueError", "(", "\"Requested currency does not match the currency previously used\"", ",", "currency", ",", "price", ".", "currency", ")", "# Description of the source field values:", "# https://www.gnucash.org/docs/v2.6/C/gnucash-help/tool-price.html", "new_price", "=", "Price", "(", "commodity", ",", "currency", ",", "price", ".", "datetime", ".", "date", "(", ")", ",", "price", ".", "value", ",", "source", "=", "\"Finance::Quote\"", ")", "commodity", ".", "prices", ".", "append", "(", "new_price", ")" ]
Returns the two node coordinates which are on the given edge coordinate .
def nodes_touching_edge ( edge_coord ) : a , b = hex_digit ( edge_coord , 1 ) , hex_digit ( edge_coord , 2 ) if a % 2 == 0 and b % 2 == 0 : return [ coord_from_hex_digits ( a , b + 1 ) , coord_from_hex_digits ( a + 1 , b ) ] else : return [ coord_from_hex_digits ( a , b ) , coord_from_hex_digits ( a + 1 , b + 1 ) ]
7,456
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L386-L398
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Return all legal edge coordinates on the grid .
def legal_edge_coords ( ) : edges = set ( ) for tile_id in legal_tile_ids ( ) : for edge in edges_touching_tile ( tile_id ) : edges . add ( edge ) logging . debug ( 'Legal edge coords({})={}' . format ( len ( edges ) , edges ) ) return edges
7,457
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L401-L410
[ "def", "example", "(", ")", ":", "client", "=", "Client", "(", "ACCOUNT_SID", ",", "AUTH_TOKEN", ")", "# Using Client Validation requires using API Keys for auth", "# First create an API key using the standard account sid, auth token client", "print", "(", "'Creating new api key...'", ")", "api_key", "=", "client", ".", "new_keys", ".", "create", "(", "friendly_name", "=", "'ClientValidationApiKey'", ")", "# Generate a new RSA Keypair", "print", "(", "'Generating RSA key pair...'", ")", "key_pair", "=", "rsa", ".", "generate_private_key", "(", "public_exponent", "=", "65537", ",", "key_size", "=", "2048", ",", "backend", "=", "default_backend", "(", ")", ")", "public_key", "=", "key_pair", ".", "public_key", "(", ")", ".", "public_bytes", "(", "Encoding", ".", "PEM", ",", "PublicFormat", ".", "SubjectPublicKeyInfo", ")", "private_key", "=", "key_pair", ".", "private_bytes", "(", "Encoding", ".", "PEM", ",", "PrivateFormat", ".", "PKCS8", ",", "NoEncryption", "(", ")", ")", "# Register the public key with Twilio", "print", "(", "'Registering public key with Twilio...'", ")", "credential", "=", "client", ".", "accounts", ".", "credentials", ".", "public_key", ".", "create", "(", "public_key", ",", "friendly_name", "=", "'ClientValidationPublicKey'", ")", "# Create a new ValidationClient with the keys we created", "validation_client", "=", "ValidationClient", "(", "ACCOUNT_SID", ",", "api_key", ".", "sid", ",", "credential", ".", "sid", ",", "private_key", ")", "# Create a REST Client using the validation_client", "client", "=", "Client", "(", "api_key", ".", "sid", ",", "api_key", ".", "secret", ",", "ACCOUNT_SID", ",", "http_client", "=", "validation_client", ")", "# Use the library as usual", "print", "(", "'Trying out client validation...'", ")", "messages", "=", "client", ".", "messages", ".", "list", "(", "limit", "=", "10", ")", "for", "m", "in", "messages", ":", "print", "(", "'Message {}'", ".", "format", "(", "m", ".", "sid", ")", ")", "print", "(", 
"'Client validation works!'", ")" ]
Return all legal node coordinates on the grid
def legal_node_coords ( ) : nodes = set ( ) for tile_id in legal_tile_ids ( ) : for node in nodes_touching_tile ( tile_id ) : nodes . add ( node ) logging . debug ( 'Legal node coords({})={}' . format ( len ( nodes ) , nodes ) ) return nodes
7,458
https://github.com/rosshamish/hexgrid/blob/16abb1822dc2789cb355f54fb06c7774eea1d9f2/hexgrid.py#L413-L422
[ "def", "skeleton", "(", "files", ",", "metadata", ",", "sqlite_extensions", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "metadata", ")", ":", "click", ".", "secho", "(", "\"File {} already exists, will not over-write\"", ".", "format", "(", "metadata", ")", ",", "bg", "=", "\"red\"", ",", "fg", "=", "\"white\"", ",", "bold", "=", "True", ",", "err", "=", "True", ",", ")", "sys", ".", "exit", "(", "1", ")", "app", "=", "Datasette", "(", "files", ",", "sqlite_extensions", "=", "sqlite_extensions", ")", "databases", "=", "{", "}", "for", "database_name", ",", "info", "in", "app", ".", "inspect", "(", ")", ".", "items", "(", ")", ":", "databases", "[", "database_name", "]", "=", "{", "\"title\"", ":", "None", ",", "\"description\"", ":", "None", ",", "\"description_html\"", ":", "None", ",", "\"license\"", ":", "None", ",", "\"license_url\"", ":", "None", ",", "\"source\"", ":", "None", ",", "\"source_url\"", ":", "None", ",", "\"queries\"", ":", "{", "}", ",", "\"tables\"", ":", "{", "table_name", ":", "{", "\"title\"", ":", "None", ",", "\"description\"", ":", "None", ",", "\"description_html\"", ":", "None", ",", "\"license\"", ":", "None", ",", "\"license_url\"", ":", "None", ",", "\"source\"", ":", "None", ",", "\"source_url\"", ":", "None", ",", "\"units\"", ":", "{", "}", ",", "}", "for", "table_name", "in", "(", "info", ".", "get", "(", "\"tables\"", ")", "or", "{", "}", ")", "}", ",", "}", "open", "(", "metadata", ",", "\"w\"", ")", ".", "write", "(", "json", ".", "dumps", "(", "{", "\"title\"", ":", "None", ",", "\"description\"", ":", "None", ",", "\"description_html\"", ":", "None", ",", "\"license\"", ":", "None", ",", "\"license_url\"", ":", "None", ",", "\"source\"", ":", "None", ",", "\"source_url\"", ":", "None", ",", "\"databases\"", ":", "databases", ",", "}", ",", "indent", "=", "4", ",", ")", ")", "click", ".", "echo", "(", "\"Wrote skeleton to {}\"", ".", "format", "(", "metadata", ")", ")" ]
provison Manila with HA
def make ( parser ) : s = parser . add_subparsers ( title = 'commands' , metavar = 'COMMAND' , help = 'description' , ) def create_manila_db_f ( args ) : create_manila_db ( args ) create_manila_db_parser = create_manila_db_subparser ( s ) create_manila_db_parser . set_defaults ( func = create_manila_db_f ) def create_service_credentials_f ( args ) : create_service_credentials ( args ) create_service_credentials_parser = create_service_credentials_subparser ( s ) create_service_credentials_parser . set_defaults ( func = create_service_credentials_f ) def install_f ( args ) : install ( args ) install_parser = install_subparser ( s ) install_parser . set_defaults ( func = install_f )
7,459
https://github.com/jiasir/playback/blob/58b2a5d669dcfaa8cad50c544a4b068dcacf9b69/playback/cli/manila.py#L163-L184
[ "def", "_inferSchema", "(", "self", ",", "rdd", ",", "samplingRatio", "=", "None", ",", "names", "=", "None", ")", ":", "first", "=", "rdd", ".", "first", "(", ")", "if", "not", "first", ":", "raise", "ValueError", "(", "\"The first row in RDD is empty, \"", "\"can not infer schema\"", ")", "if", "type", "(", "first", ")", "is", "dict", ":", "warnings", ".", "warn", "(", "\"Using RDD of dict to inferSchema is deprecated. \"", "\"Use pyspark.sql.Row instead\"", ")", "if", "samplingRatio", "is", "None", ":", "schema", "=", "_infer_schema", "(", "first", ",", "names", "=", "names", ")", "if", "_has_nulltype", "(", "schema", ")", ":", "for", "row", "in", "rdd", ".", "take", "(", "100", ")", "[", "1", ":", "]", ":", "schema", "=", "_merge_type", "(", "schema", ",", "_infer_schema", "(", "row", ",", "names", "=", "names", ")", ")", "if", "not", "_has_nulltype", "(", "schema", ")", ":", "break", "else", ":", "raise", "ValueError", "(", "\"Some of types cannot be determined by the \"", "\"first 100 rows, please try again with sampling\"", ")", "else", ":", "if", "samplingRatio", "<", "0.99", ":", "rdd", "=", "rdd", ".", "sample", "(", "False", ",", "float", "(", "samplingRatio", ")", ")", "schema", "=", "rdd", ".", "map", "(", "lambda", "row", ":", "_infer_schema", "(", "row", ",", "names", ")", ")", ".", "reduce", "(", "_merge_type", ")", "return", "schema" ]
Return a new tree with value associated at index .
def assoc ( self , index , value ) : newnode = LookupTreeNode ( index , value ) newtree = LookupTree ( ) newtree . root = _assoc_down ( self . root , newnode , 0 ) return newtree
7,460
https://github.com/zhemao/funktown/blob/8d5c5a8bdad2b85b33b4cea3febd820c2657c375/funktown/lookuptree.py#L73-L78
[ "def", "start", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "is_running", "(", ")", ":", "self", ".", "websock_url", "=", "self", ".", "chrome", ".", "start", "(", "*", "*", "kwargs", ")", "self", ".", "websock", "=", "websocket", ".", "WebSocketApp", "(", "self", ".", "websock_url", ")", "self", ".", "websock_thread", "=", "WebsockReceiverThread", "(", "self", ".", "websock", ",", "name", "=", "'WebsockThread:%s'", "%", "self", ".", "chrome", ".", "port", ")", "self", ".", "websock_thread", ".", "start", "(", ")", "self", ".", "_wait_for", "(", "lambda", ":", "self", ".", "websock_thread", ".", "is_open", ",", "timeout", "=", "30", ")", "# tell browser to send us messages we're interested in", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Page.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Console.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Runtime.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.setForceUpdateOnPageLoad'", ")", "# disable google analytics", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.setBlockedURLs'", ",", "params", "=", "{", "'urls'", ":", "[", "'*google-analytics.com/analytics.js'", ",", "'*google-analytics.com/ga.js'", "]", "}", ")" ]
Return new tree with index removed .
def remove ( self , index ) : newtree = LookupTree ( ) newtree . root = _remove_down ( self . root , index , 0 ) return newtree
7,461
https://github.com/zhemao/funktown/blob/8d5c5a8bdad2b85b33b4cea3febd820c2657c375/funktown/lookuptree.py#L92-L96
[ "def", "default_asset_manager", "(", "self", ")", ":", "cache_path", "=", "None", "cache_directory", "=", "self", ".", "config", "[", "'CACHE_DIRECTORY'", "]", "if", "cache_directory", ":", "cache_directory", "=", "cache_directory", ".", "format", "(", "version", "=", "__version__", ")", "cache_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "instance_path", ",", "cache_directory", ")", "return", "GitHubAssetManager", "(", "cache_path", ",", "self", ".", "config", "[", "'STYLE_URLS'", "]", ",", "self", ".", "quiet", ")" ]
Insert a node in - place . It is highly suggested that you do not use this method . Use assoc instead
def insert ( self , index , value ) : newnode = LookupTreeNode ( index , value ) level = 0 node = self . root while True : ind = _getbits ( newnode . index , level ) level += 1 child = node . children [ ind ] if child is None or child . index == newnode . index : if child : assert child . value == newnode . value node . children [ ind ] = newnode break elif child . index == _root_index : # This is a branch node = child else : branch = LookupTreeNode ( ) nind = _getbits ( newnode . index , level ) cind = _getbits ( child . index , level ) node . children [ ind ] = branch # Life gets tricky when... if nind == cind : branch . children [ cind ] = child # recurse node = branch else : branch . children [ nind ] = newnode branch . children [ cind ] = child break
7,462
https://github.com/zhemao/funktown/blob/8d5c5a8bdad2b85b33b4cea3febd820c2657c375/funktown/lookuptree.py#L98-L130
[ "def", "if_sqlserver_disable_constraints_triggers", "(", "session", ":", "SqlASession", ",", "tablename", ":", "str", ")", "->", "None", ":", "with", "if_sqlserver_disable_constraints", "(", "session", ",", "tablename", ")", ":", "with", "if_sqlserver_disable_triggers", "(", "session", ",", "tablename", ")", ":", "yield" ]
Reset the registry to the standard codecs .
def reset ( cls ) : cls . _codecs = { } c = cls . _codec for ( name , encode , decode ) in cls . _common_codec_data : cls . _codecs [ name ] = c ( encode , decode )
7,463
https://github.com/ivilata/pymultihash/blob/093365f20f6d8627c1fae13e0f4e0b35e9b39ad2/multihash/codecs.py#L57-L62
[ "def", "get_InsideConvexPoly", "(", "self", ",", "RelOff", "=", "_def", ".", "TorRelOff", ",", "ZLim", "=", "'Def'", ",", "Spline", "=", "True", ",", "Splprms", "=", "_def", ".", "TorSplprms", ",", "NP", "=", "_def", ".", "TorInsideNP", ",", "Plot", "=", "False", ",", "Test", "=", "True", ")", ":", "return", "_comp", ".", "_Ves_get_InsideConvexPoly", "(", "self", ".", "Poly_closed", ",", "self", ".", "dgeom", "[", "'P2Min'", "]", ",", "self", ".", "dgeom", "[", "'P2Max'", "]", ",", "self", ".", "dgeom", "[", "'BaryS'", "]", ",", "RelOff", "=", "RelOff", ",", "ZLim", "=", "ZLim", ",", "Spline", "=", "Spline", ",", "Splprms", "=", "Splprms", ",", "NP", "=", "NP", ",", "Plot", "=", "Plot", ",", "Test", "=", "Test", ")" ]
Add a codec to the registry .
def register ( cls , name , encode , decode ) : cls . _codecs [ name ] = cls . _codec ( encode , decode )
7,464
https://github.com/ivilata/pymultihash/blob/093365f20f6d8627c1fae13e0f4e0b35e9b39ad2/multihash/codecs.py#L65-L80
[ "def", "remove_data_flow", "(", "self", ",", "data_flow_id", ",", "destroy", "=", "True", ")", ":", "if", "data_flow_id", "not", "in", "self", ".", "_data_flows", ":", "raise", "AttributeError", "(", "\"The data_flow_id %s does not exist\"", "%", "str", "(", "data_flow_id", ")", ")", "self", ".", "_data_flows", "[", "data_flow_id", "]", ".", "parent", "=", "None", "return", "self", ".", "_data_flows", ".", "pop", "(", "data_flow_id", ")" ]
Default formatter . Convert value to string .
def default_formatter ( handler , item , value ) : if hasattr ( value , '__unicode__' ) : value = value . __unicode__ ( ) return escape ( str ( value ) )
7,465
https://github.com/klen/muffin-admin/blob/404dc8e5107e943b7c42fa21c679c34ddb4de1d5/muffin_admin/formatters.py#L7-L12
[ "def", "_find_start_time", "(", "hdr", ",", "s_freq", ")", ":", "start_time", "=", "hdr", "[", "'stc'", "]", "[", "'creation_time'", "]", "for", "one_stamp", "in", "hdr", "[", "'stamps'", "]", ":", "if", "one_stamp", "[", "'segment_name'", "]", ".", "decode", "(", ")", "==", "hdr", "[", "'erd'", "]", "[", "'filename'", "]", ":", "offset", "=", "one_stamp", "[", "'start_stamp'", "]", "break", "erd_time", "=", "(", "hdr", "[", "'erd'", "]", "[", "'creation_time'", "]", "-", "timedelta", "(", "seconds", "=", "offset", "/", "s_freq", ")", ")", ".", "replace", "(", "microsecond", "=", "0", ")", "stc_erd_diff", "=", "(", "start_time", "-", "erd_time", ")", ".", "total_seconds", "(", ")", "if", "stc_erd_diff", ">", "START_TIME_TOL", ":", "lg", ".", "warn", "(", "'Time difference between ERD and STC is {} s so using ERD time'", "' at {}'", ".", "format", "(", "stc_erd_diff", ",", "erd_time", ")", ")", "start_time", "=", "erd_time", "return", "start_time" ]
Format list .
def list_formatter ( handler , item , value ) : return u', ' . join ( str ( v ) for v in value )
7,466
https://github.com/klen/muffin-admin/blob/404dc8e5107e943b7c42fa21c679c34ddb4de1d5/muffin_admin/formatters.py#L21-L23
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Format value .
def format_value ( handler , item , column ) : value = getattr ( item , column , None ) formatter = FORMATTERS . get ( type ( value ) , default_formatter ) return formatter ( handler , item , value )
7,467
https://github.com/klen/muffin-admin/blob/404dc8e5107e943b7c42fa21c679c34ddb4de1d5/muffin_admin/formatters.py#L49-L53
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Regex string for optionally signed binary or privative feature .
def make_regex ( string ) : if string and string [ 0 ] in '+-' : sign , name = string [ 0 ] , string [ 1 : ] if not name or '+' in name or '-' in name : raise ValueError ( 'inappropriate feature name: %r' % string ) tmpl = r'([+]?%s)' if sign == '+' else r'(-%s)' return tmpl % name if not string or '+' in string or '-' in string : raise ValueError ( 'inappropriate feature name: %r' % string ) return r'(%s)' % string
7,468
https://github.com/xflr6/features/blob/f985304dd642da6ecdc66d85167d00daa4efe5f4/features/parsers.py#L18-L45
[ "def", "_bind_topics", "(", "self", ",", "topics", ")", ":", "# FIXME: Allow for these subscriptions to fail and clean up the previous ones", "# so that this function is atomic", "self", ".", "client", ".", "subscribe", "(", "topics", ".", "status", ",", "self", ".", "_on_status_message", ")", "self", ".", "client", ".", "subscribe", "(", "topics", ".", "tracing", ",", "self", ".", "_on_trace", ")", "self", ".", "client", ".", "subscribe", "(", "topics", ".", "streaming", ",", "self", ".", "_on_report", ")", "self", ".", "client", ".", "subscribe", "(", "topics", ".", "response", ",", "self", ".", "_on_response_message", ")" ]
Yield all feature name pairs in substring relation .
def substring_names ( features ) : names = tools . uniqued ( map ( remove_sign , features ) ) for l , r in permutations ( names , 2 ) : if l in r : yield ( l , r )
7,469
https://github.com/xflr6/features/blob/f985304dd642da6ecdc66d85167d00daa4efe5f4/features/parsers.py#L48-L57
[ "def", "connection", "(", "cls", ")", ":", "local", "=", "cls", ".", "_threadlocal", "if", "not", "getattr", "(", "local", ",", "'connection'", ",", "None", ")", ":", "# Make sure these variables are no longer affected by other threads.", "local", ".", "user", "=", "cls", ".", "user", "local", ".", "password", "=", "cls", ".", "password", "local", ".", "site", "=", "cls", ".", "site", "local", ".", "timeout", "=", "cls", ".", "timeout", "local", ".", "headers", "=", "cls", ".", "headers", "local", ".", "format", "=", "cls", ".", "format", "local", ".", "version", "=", "cls", ".", "version", "local", ".", "url", "=", "cls", ".", "url", "if", "cls", ".", "site", "is", "None", ":", "raise", "ValueError", "(", "\"No shopify session is active\"", ")", "local", ".", "connection", "=", "ShopifyConnection", "(", "cls", ".", "site", ",", "cls", ".", "user", ",", "cls", ".", "password", ",", "cls", ".", "timeout", ",", "cls", ".", "format", ")", "return", "local", ".", "connection" ]
Return the nearest featureset that subsumes all given ones .
def join ( self , featuresets ) : concepts = ( f . concept for f in featuresets ) join = self . lattice . join ( concepts ) return self . _featuresets [ join . index ]
7,470
https://github.com/xflr6/features/blob/f985304dd642da6ecdc66d85167d00daa4efe5f4/features/systems.py#L180-L184
[ "def", "agent_check_register", "(", "consul_url", "=", "None", ",", "token", "=", "None", ",", "*", "*", "kwargs", ")", ":", "ret", "=", "{", "}", "data", "=", "{", "}", "if", "not", "consul_url", ":", "consul_url", "=", "_get_config", "(", ")", "if", "not", "consul_url", ":", "log", ".", "error", "(", "'No Consul URL found.'", ")", "ret", "[", "'message'", "]", "=", "'No Consul URL found.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "if", "'name'", "in", "kwargs", ":", "data", "[", "'Name'", "]", "=", "kwargs", "[", "'name'", "]", "else", ":", "raise", "SaltInvocationError", "(", "'Required argument \"name\" is missing.'", ")", "if", "True", "not", "in", "[", "True", "for", "item", "in", "(", "'script'", ",", "'http'", ",", "'ttl'", ")", "if", "item", "in", "kwargs", "]", ":", "ret", "[", "'message'", "]", "=", "'Required parameter \"script\" or \"http\" is missing.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "if", "'id'", "in", "kwargs", ":", "data", "[", "'ID'", "]", "=", "kwargs", "[", "'id'", "]", "if", "'notes'", "in", "kwargs", ":", "data", "[", "'Notes'", "]", "=", "kwargs", "[", "'notes'", "]", "if", "'script'", "in", "kwargs", ":", "if", "'interval'", "not", "in", "kwargs", ":", "ret", "[", "'message'", "]", "=", "'Required parameter \"interval\" is missing.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "data", "[", "'Script'", "]", "=", "kwargs", "[", "'script'", "]", "data", "[", "'Interval'", "]", "=", "kwargs", "[", "'interval'", "]", "if", "'http'", "in", "kwargs", ":", "if", "'interval'", "not", "in", "kwargs", ":", "ret", "[", "'message'", "]", "=", "'Required parameter \"interval\" is missing.'", "ret", "[", "'res'", "]", "=", "False", "return", "ret", "data", "[", "'HTTP'", "]", "=", "kwargs", "[", "'http'", "]", "data", "[", "'Interval'", "]", "=", "kwargs", "[", "'interval'", "]", "if", "'ttl'", "in", "kwargs", ":", "data", "[", "'TTL'", "]", "=", "kwargs", "[", "'ttl'", "]", "function", "=", 
"'agent/check/register'", "res", "=", "_query", "(", "consul_url", "=", "consul_url", ",", "function", "=", "function", ",", "token", "=", "token", ",", "method", "=", "'PUT'", ",", "data", "=", "data", ")", "if", "res", "[", "'res'", "]", ":", "ret", "[", "'res'", "]", "=", "True", "ret", "[", "'message'", "]", "=", "(", "'Check {0} added to agent.'", ".", "format", "(", "kwargs", "[", "'name'", "]", ")", ")", "else", ":", "ret", "[", "'res'", "]", "=", "False", "ret", "[", "'message'", "]", "=", "'Unable to add check to agent.'", "return", "ret" ]
Return the nearest featureset that implies all given ones .
def meet ( self , featuresets ) : concepts = ( f . concept for f in featuresets ) meet = self . lattice . meet ( concepts ) return self . _featuresets [ meet . index ]
7,471
https://github.com/xflr6/features/blob/f985304dd642da6ecdc66d85167d00daa4efe5f4/features/systems.py#L186-L190
[ "def", "ParseApplicationResourceUsage", "(", "self", ",", "parser_mediator", ",", "cache", "=", "None", ",", "database", "=", "None", ",", "table", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "self", ".", "_ParseGUIDTable", "(", "parser_mediator", ",", "cache", ",", "database", ",", "table", ",", "self", ".", "_APPLICATION_RESOURCE_USAGE_VALUES_MAP", ",", "SRUMApplicationResourceUsageEventData", ")" ]
Yield all featuresets that subsume any of the given ones .
def upset_union ( self , featuresets ) : concepts = ( f . concept for f in featuresets ) indexes = ( c . index for c in self . lattice . upset_union ( concepts ) ) return map ( self . _featuresets . __getitem__ , indexes )
7,472
https://github.com/xflr6/features/blob/f985304dd642da6ecdc66d85167d00daa4efe5f4/features/systems.py#L192-L196
[ "def", "_encode_msg", "(", "self", ",", "start_pos", ",", "offset", ",", "timestamp", ",", "key", ",", "value", ",", "attributes", "=", "0", ")", ":", "magic", "=", "self", ".", "_magic", "buf", "=", "self", ".", "_buffer", "pos", "=", "start_pos", "# Write key and value", "pos", "+=", "self", ".", "KEY_OFFSET_V0", "if", "magic", "==", "0", "else", "self", ".", "KEY_OFFSET_V1", "if", "key", "is", "None", ":", "struct", ".", "pack_into", "(", "\">i\"", ",", "buf", ",", "pos", ",", "-", "1", ")", "pos", "+=", "self", ".", "KEY_LENGTH", "else", ":", "key_size", "=", "len", "(", "key", ")", "struct", ".", "pack_into", "(", "\">i\"", ",", "buf", ",", "pos", ",", "key_size", ")", "pos", "+=", "self", ".", "KEY_LENGTH", "buf", "[", "pos", ":", "pos", "+", "key_size", "]", "=", "key", "pos", "+=", "key_size", "if", "value", "is", "None", ":", "struct", ".", "pack_into", "(", "\">i\"", ",", "buf", ",", "pos", ",", "-", "1", ")", "pos", "+=", "self", ".", "VALUE_LENGTH", "else", ":", "value_size", "=", "len", "(", "value", ")", "struct", ".", "pack_into", "(", "\">i\"", ",", "buf", ",", "pos", ",", "value_size", ")", "pos", "+=", "self", ".", "VALUE_LENGTH", "buf", "[", "pos", ":", "pos", "+", "value_size", "]", "=", "value", "pos", "+=", "value_size", "length", "=", "(", "pos", "-", "start_pos", ")", "-", "self", ".", "LOG_OVERHEAD", "# Write msg header. 
Note, that Crc will be updated later", "if", "magic", "==", "0", ":", "self", ".", "HEADER_STRUCT_V0", ".", "pack_into", "(", "buf", ",", "start_pos", ",", "offset", ",", "length", ",", "0", ",", "magic", ",", "attributes", ")", "else", ":", "self", ".", "HEADER_STRUCT_V1", ".", "pack_into", "(", "buf", ",", "start_pos", ",", "offset", ",", "length", ",", "0", ",", "magic", ",", "attributes", ",", "timestamp", ")", "# Calculate CRC for msg", "crc_data", "=", "memoryview", "(", "buf", ")", "[", "start_pos", "+", "self", ".", "MAGIC_OFFSET", ":", "]", "crc", "=", "calc_crc32", "(", "crc_data", ")", "struct", ".", "pack_into", "(", "\">I\"", ",", "buf", ",", "start_pos", "+", "self", ".", "CRC_OFFSET", ",", "crc", ")", "return", "crc" ]
Return the system lattice visualization as graphviz source .
def graphviz ( self , highlight = None , maximal_label = None , topdown = None , filename = None , directory = None , render = False , view = False ) : return visualize . featuresystem ( self , highlight , maximal_label , topdown , filename , directory , render , view )
7,473
https://github.com/xflr6/features/blob/f985304dd642da6ecdc66d85167d00daa4efe5f4/features/systems.py#L204-L208
[ "def", "_group_chunks_by_entities", "(", "self", ",", "chunks", ",", "entities", ")", ":", "for", "entity", "in", "entities", ":", "chunks_to_concat", "=", "chunks", ".", "get_overlaps", "(", "entity", "[", "'beginOffset'", "]", ",", "len", "(", "entity", "[", "'content'", "]", ")", ")", "if", "not", "chunks_to_concat", ":", "continue", "new_chunk_word", "=", "u''", ".", "join", "(", "[", "chunk", ".", "word", "for", "chunk", "in", "chunks_to_concat", "]", ")", "new_chunk", "=", "Chunk", "(", "new_chunk_word", ")", "chunks", ".", "swap", "(", "chunks_to_concat", ",", "new_chunk", ")", "return", "chunks" ]
Do a soap request .
def soap_action ( self , service , action , payloadbody ) : payload = self . soapenvelope . format ( body = payloadbody ) . encode ( 'utf-8' ) headers = { "Host" : self . url , "Content-Type" : "text/xml; charset=UTF-8" , "Cache-Control" : "no-cache" , "Content-Length" : str ( len ( payload ) ) , "SOAPAction" : action } try : self . last_exception = None response = requests . post ( url = self . url + service , headers = headers , data = payload , cookies = self . cookies ) except requests . exceptions . RequestException as exp : self . last_exception = exp return False if response . status_code != 200 : self . last_response = response return False self . cookies = response . cookies try : xdoc = xml . etree . ElementTree . fromstring ( response . text ) except xml . etree . ElementTree . ParseError as exp : self . last_exception = exp self . last_response = response return False return xdoc
7,474
https://github.com/dingusdk/PythonIhcSdk/blob/7e2067e009fe7600b49f30bff1cf91dc72fc891e/ihcsdk/ihcconnection.py#L24-L49
[ "def", "classify_languages", "(", "self", ")", ":", "return", "BlobsWithLanguageDataFrame", "(", "self", ".", "_engine_dataframe", ".", "classifyLanguages", "(", ")", ",", "self", ".", "_session", ",", "self", ".", "_implicits", ")" ]
Given a resource CtsText will compute valid reffs
def getValidReff ( self , level = 1 , reference = None ) : if reference : urn = "{0}:{1}" . format ( self . urn , reference ) else : urn = str ( self . urn ) if level == - 1 : level = len ( self . citation ) xml = self . retriever . getValidReff ( level = level , urn = urn ) xml = xmlparser ( xml ) self . _parse_request ( xml . xpath ( "//ti:request" , namespaces = XPATH_NAMESPACES ) [ 0 ] ) return [ ref . split ( ":" ) [ - 1 ] for ref in xml . xpath ( "//ti:reply//ti:urn/text()" , namespaces = XPATH_NAMESPACES ) ]
7,475
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L63-L88
[ "def", "hide", "(", "self", ",", "selections", ")", ":", "if", "'atoms'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'atoms'", "]", "=", "selections", "[", "'atoms'", "]", "self", ".", "on_atom_hidden_changed", "(", ")", "if", "'bonds'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'bonds'", "]", "=", "selections", "[", "'bonds'", "]", "self", ".", "on_bond_hidden_changed", "(", ")", "if", "'box'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'box'", "]", "=", "box_s", "=", "selections", "[", "'box'", "]", "if", "box_s", ".", "mask", "[", "0", "]", ":", "if", "self", ".", "viewer", ".", "has_renderer", "(", "self", ".", "box_renderer", ")", ":", "self", ".", "viewer", ".", "remove_renderer", "(", "self", ".", "box_renderer", ")", "else", ":", "if", "not", "self", ".", "viewer", ".", "has_renderer", "(", "self", ".", "box_renderer", ")", ":", "self", ".", "viewer", ".", "add_renderer", "(", "self", ".", "box_renderer", ")", "return", "self", ".", "hidden_state" ]
Retrieve a passage and store it in the object
def getTextualNode ( self , subreference = None ) : if isinstance ( subreference , URN ) : urn = str ( subreference ) elif isinstance ( subreference , CtsReference ) : urn = "{0}:{1}" . format ( self . urn , str ( subreference ) ) elif isinstance ( subreference , str ) : if ":" in subreference : urn = subreference else : urn = "{0}:{1}" . format ( self . urn . upTo ( URN . NO_PASSAGE ) , subreference ) elif isinstance ( subreference , list ) : urn = "{0}:{1}" . format ( self . urn , "." . join ( subreference ) ) else : urn = str ( self . urn ) response = xmlparser ( self . retriever . getPassage ( urn = urn ) ) self . _parse_request ( response . xpath ( "//ti:request" , namespaces = XPATH_NAMESPACES ) [ 0 ] ) return CtsPassage ( urn = urn , resource = response , retriever = self . retriever )
7,476
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L90-L117
[ "def", "_set_virtual", "(", "self", ",", "key", ",", "value", ")", ":", "if", "key", "in", "self", "and", "key", "not", "in", "self", ".", "_virtual_keys", ":", "return", "# Do nothing for non-virtual keys.", "self", ".", "_virtual_keys", ".", "add", "(", "key", ")", "if", "key", "in", "self", "and", "self", "[", "key", "]", "is", "not", "value", ":", "self", ".", "_on_change", "(", "key", ",", "value", ")", "dict", ".", "__setitem__", "(", "self", ",", "key", ",", "value", ")", "for", "overlay", "in", "self", ".", "_iter_overlays", "(", ")", ":", "overlay", ".", "_set_virtual", "(", "key", ",", "value", ")" ]
Retrieve a passage and informations around it and store it in the object
def getPassagePlus ( self , reference = None ) : if reference : urn = "{0}:{1}" . format ( self . urn , reference ) else : urn = str ( self . urn ) response = xmlparser ( self . retriever . getPassagePlus ( urn = urn ) ) passage = CtsPassage ( urn = urn , resource = response , retriever = self . retriever ) passage . _parse_request ( response . xpath ( "//ti:reply/ti:label" , namespaces = XPATH_NAMESPACES ) [ 0 ] ) self . citation = passage . citation return passage
7,477
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L134-L153
[ "def", "getInitialSample", "(", "self", ",", "wmg", ")", ":", "cands", "=", "range", "(", "len", "(", "wmg", ")", ")", "allPairs", "=", "itertools", ".", "combinations", "(", "cands", ",", "2", ")", "V", "=", "self", ".", "createBinaryRelation", "(", "len", "(", "cands", ")", ")", "for", "pair", "in", "allPairs", ":", "if", "wmg", "[", "pair", "[", "0", "]", "+", "1", "]", "[", "pair", "[", "1", "]", "+", "1", "]", ">", "0", ":", "V", "[", "pair", "[", "0", "]", "]", "[", "pair", "[", "1", "]", "]", "=", "1", "V", "[", "pair", "[", "1", "]", "]", "[", "pair", "[", "0", "]", "]", "=", "0", "else", ":", "V", "[", "pair", "[", "0", "]", "]", "[", "pair", "[", "1", "]", "]", "=", "0", "V", "[", "pair", "[", "1", "]", "]", "[", "pair", "[", "0", "]", "]", "=", "1", "return", "V" ]
Parse a request with metadata information
def _parse_request ( self , xml ) : for node in xml . xpath ( ".//ti:groupname" , namespaces = XPATH_NAMESPACES ) : lang = node . get ( "xml:lang" ) or CtsText . DEFAULT_LANG self . metadata . add ( RDF_NAMESPACES . CTS . groupname , lang = lang , value = node . text ) self . set_creator ( node . text , lang ) for node in xml . xpath ( ".//ti:title" , namespaces = XPATH_NAMESPACES ) : lang = node . get ( "xml:lang" ) or CtsText . DEFAULT_LANG self . metadata . add ( RDF_NAMESPACES . CTS . title , lang = lang , value = node . text ) self . set_title ( node . text , lang ) for node in xml . xpath ( ".//ti:label" , namespaces = XPATH_NAMESPACES ) : lang = node . get ( "xml:lang" ) or CtsText . DEFAULT_LANG self . metadata . add ( RDF_NAMESPACES . CTS . label , lang = lang , value = node . text ) self . set_subject ( node . text , lang ) for node in xml . xpath ( ".//ti:description" , namespaces = XPATH_NAMESPACES ) : lang = node . get ( "xml:lang" ) or CtsText . DEFAULT_LANG self . metadata . add ( RDF_NAMESPACES . CTS . description , lang = lang , value = node . text ) self . set_description ( node . text , lang ) # Need to code that p if not self . citation . is_set ( ) and xml . xpath ( "//ti:citation" , namespaces = XPATH_NAMESPACES ) : self . citation = CtsCollection . XmlCtsCitation . ingest ( xml , xpath = ".//ti:citation[not(ancestor::ti:citation)]" )
7,478
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L155-L186
[ "def", "delete_user", "(", "self", ",", "user", ")", ":", "assert", "self", ".", "user", "==", "'catroot'", "or", "self", ".", "user", "==", "'postgres'", "assert", "not", "user", "==", "'public'", "con", "=", "self", ".", "connection", "or", "self", ".", "_connect", "(", ")", "cur", "=", "con", ".", "cursor", "(", ")", "cur", ".", "execute", "(", "'DROP SCHEMA {user} CASCADE;'", ".", "format", "(", "user", "=", "user", ")", ")", "cur", ".", "execute", "(", "'REVOKE USAGE ON SCHEMA public FROM {user};'", ".", "format", "(", "user", "=", "user", ")", ")", "cur", ".", "execute", "(", "'REVOKE SELECT ON ALL TABLES IN SCHEMA public FROM {user};'", ".", "format", "(", "user", "=", "user", ")", ")", "cur", ".", "execute", "(", "'DROP ROLE {user};'", ".", "format", "(", "user", "=", "user", ")", ")", "self", ".", "stdout", ".", "write", "(", "'REMOVED USER {user}\\n'", ".", "format", "(", "user", "=", "user", ")", ")", "if", "self", ".", "connection", "is", "None", ":", "con", ".", "commit", "(", ")", "con", ".", "close", "(", ")", "return", "self" ]
Retrieve metadata about the text
def getLabel ( self ) : response = xmlparser ( self . retriever . getLabel ( urn = str ( self . urn ) ) ) self . _parse_request ( response . xpath ( "//ti:reply/ti:label" , namespaces = XPATH_NAMESPACES ) [ 0 ] ) return self . metadata
7,479
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L188-L202
[ "def", "_setup_ipc", "(", "self", ")", ":", "log", ".", "debug", "(", "'Setting up the server IPC puller to receive from the listener'", ")", "self", ".", "ctx", "=", "zmq", ".", "Context", "(", ")", "# subscribe to listener", "self", ".", "sub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "PULL", ")", "self", ".", "sub", ".", "bind", "(", "LST_IPC_URL", ")", "try", ":", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "sub", ".", "setsockopt", "(", "zmq", ".", "RCVHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# device publishers", "log", ".", "debug", "(", "'Creating the router ICP on the server'", ")", "self", ".", "pub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "ROUTER", ")", "self", ".", "pub", ".", "bind", "(", "DEV_IPC_URL", ")", "try", ":", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "pub", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# Pipe to the publishers", "self", ".", "publisher_pub", "=", "self", ".", "ctx", ".", "socket", "(", "zmq", ".", "PUB", ")", "self", ".", "publisher_pub", ".", "connect", "(", "PUB_PX_IPC_URL", ")", "try", ":", "self", ".", "publisher_pub", ".", "setsockopt", "(", "zmq", ".", "HWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")", "# zmq 2", "except", "AttributeError", ":", "# zmq 3", "self", ".", "publisher_pub", ".", "setsockopt", "(", "zmq", ".", "SNDHWM", ",", "self", ".", "opts", "[", "'hwm'", "]", ")" ]
Get the previous URN of a reference of the text
def getPrevNextUrn ( self , reference ) : _prev , _next = _SharedMethod . prevnext ( self . retriever . getPrevNextUrn ( urn = "{}:{}" . format ( str ( URN ( str ( self . urn ) ) . upTo ( URN . NO_PASSAGE ) ) , str ( reference ) ) ) ) return _prev , _next
7,480
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L204-L222
[ "def", "connect", "(", "self", ")", ":", "if", "self", ".", "_port", ":", "self", ".", "_driver", "=", "MagDeckDriver", "(", ")", "self", ".", "_driver", ".", "connect", "(", "self", ".", "_port", ")", "self", ".", "_device_info", "=", "self", ".", "_driver", ".", "get_device_info", "(", ")", "else", ":", "# Sanity check: Should never happen, because connect should", "# never be called without a port on Module", "raise", "MissingDevicePortError", "(", "\"MagDeck couldnt connect to port {}\"", ".", "format", "(", "self", ".", "_port", ")", ")" ]
Get the first children URN for a given resource
def getFirstUrn ( self , reference = None ) : if reference is not None : if ":" in reference : urn = reference else : urn = "{}:{}" . format ( str ( URN ( str ( self . urn ) ) . upTo ( URN . NO_PASSAGE ) ) , str ( reference ) ) else : urn = str ( self . urn ) _first = _SharedMethod . firstUrn ( self . retriever . getFirstUrn ( urn ) ) return _first
7,481
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L224-L247
[ "def", "_sendStatCmd", "(", "self", ",", "cmd", ")", ":", "try", ":", "self", ".", "_conn", ".", "write", "(", "\"%s\\r\\n\"", "%", "cmd", ")", "regex", "=", "re", ".", "compile", "(", "'^(END|ERROR)\\r\\n'", ",", "re", ".", "MULTILINE", ")", "(", "idx", ",", "mobj", ",", "text", ")", "=", "self", ".", "_conn", ".", "expect", "(", "[", "regex", ",", "]", ",", "self", ".", "_timeout", ")", "#@UnusedVariable", "except", ":", "raise", "Exception", "(", "\"Communication with %s failed\"", "%", "self", ".", "_instanceName", ")", "if", "mobj", "is", "not", "None", ":", "if", "mobj", ".", "group", "(", "1", ")", "==", "'END'", ":", "return", "text", ".", "splitlines", "(", ")", "[", ":", "-", "1", "]", "elif", "mobj", ".", "group", "(", "1", ")", "==", "'ERROR'", ":", "raise", "Exception", "(", "\"Protocol error in communication with %s.\"", "%", "self", ".", "_instanceName", ")", "else", ":", "raise", "Exception", "(", "\"Connection with %s timed out.\"", "%", "self", ".", "_instanceName", ")" ]
Parse a resource to get the first URN
def firstUrn ( resource ) : resource = xmlparser ( resource ) urn = resource . xpath ( "//ti:reply/ti:urn/text()" , namespaces = XPATH_NAMESPACES , magic_string = True ) if len ( urn ) > 0 : urn = str ( urn [ 0 ] ) return urn . split ( ":" ) [ - 1 ]
7,482
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L274-L287
[ "def", "debug_box_out", "(", "self", ",", "p_min", ":", "Union", "[", "Unit", ",", "Point2", ",", "Point3", "]", ",", "p_max", ":", "Union", "[", "Unit", ",", "Point2", ",", "Point3", "]", ",", "color", "=", "None", ")", ":", "self", ".", "_debug_boxes", ".", "append", "(", "debug_pb", ".", "DebugBox", "(", "min", "=", "self", ".", "to_debug_point", "(", "p_min", ")", ",", "max", "=", "self", ".", "to_debug_point", "(", "p_max", ")", ",", "color", "=", "self", ".", "to_debug_color", "(", "color", ")", ")", ")" ]
Parse a resource to get the prev and next urn
def prevnext ( resource ) : _prev , _next = False , False resource = xmlparser ( resource ) prevnext = resource . xpath ( "//ti:prevnext" , namespaces = XPATH_NAMESPACES ) if len ( prevnext ) > 0 : _next , _prev = None , None prevnext = prevnext [ 0 ] _next_xpath = prevnext . xpath ( "ti:next/ti:urn/text()" , namespaces = XPATH_NAMESPACES , smart_strings = False ) _prev_xpath = prevnext . xpath ( "ti:prev/ti:urn/text()" , namespaces = XPATH_NAMESPACES , smart_strings = False ) if len ( _next_xpath ) : _next = _next_xpath [ 0 ] . split ( ":" ) [ - 1 ] if len ( _prev_xpath ) : _prev = _prev_xpath [ 0 ] . split ( ":" ) [ - 1 ] return _prev , _next
7,483
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L290-L314
[ "def", "get_max_devices_per_port_for_storage_bus", "(", "self", ",", "bus", ")", ":", "if", "not", "isinstance", "(", "bus", ",", "StorageBus", ")", ":", "raise", "TypeError", "(", "\"bus can only be an instance of type StorageBus\"", ")", "max_devices_per_port", "=", "self", ".", "_call", "(", "\"getMaxDevicesPerPortForStorageBus\"", ",", "in_p", "=", "[", "bus", "]", ")", "return", "max_devices_per_port" ]
Previous passage Identifier
def prevId ( self ) : if self . _prev_id is False : # Request the next urn self . _prev_id , self . _next_id = self . getPrevNextUrn ( reference = self . urn . reference ) return self . _prev_id
7,484
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L410-L419
[ "def", "run", "(", "self", ")", ":", "# Create the thread pool.", "executor", "=", "concurrent", ".", "futures", ".", "ThreadPoolExecutor", "(", "max_workers", "=", "self", ".", "_config", "[", "'num_workers'", "]", ")", "# Wait to ensure multiple senders can be synchronised.", "now", "=", "int", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "timestamp", "(", ")", ")", "start_time", "=", "(", "(", "now", "+", "29", ")", "//", "30", ")", "*", "30", "self", ".", "_log", ".", "info", "(", "'Waiting until {}'", ".", "format", "(", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "start_time", ")", ")", ")", "while", "int", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "timestamp", "(", ")", ")", "<", "start_time", ":", "time", ".", "sleep", "(", "0.1", ")", "# Run the event loop.", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "try", ":", "loop", ".", "run_until_complete", "(", "self", ".", "_run_loop", "(", "executor", ")", ")", "except", "KeyboardInterrupt", ":", "pass", "finally", ":", "# Send the end of stream message to each stream.", "self", ".", "_log", ".", "info", "(", "'Shutting down, closing streams...'", ")", "tasks", "=", "[", "]", "for", "stream", ",", "item_group", "in", "self", ".", "_streams", ":", "tasks", ".", "append", "(", "stream", ".", "async_send_heap", "(", "item_group", ".", "get_end", "(", ")", ")", ")", "loop", ".", "run_until_complete", "(", "asyncio", ".", "gather", "(", "*", "tasks", ")", ")", "self", ".", "_log", ".", "info", "(", "'... finished.'", ")", "executor", ".", "shutdown", "(", ")" ]
Shortcut for getting the following passage identifier
def nextId ( self ) : if self . _next_id is False : # Request the next urn self . _prev_id , self . _next_id = self . getPrevNextUrn ( reference = self . urn . reference ) return self . _next_id
7,485
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L431-L440
[ "def", "_sync", "(", "self", ")", ":", "if", "(", "self", ".", "_opcount", ">", "self", ".", "checkpoint_operations", "or", "datetime", ".", "now", "(", ")", ">", "self", ".", "_last_sync", "+", "self", ".", "checkpoint_timeout", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Synchronizing queue metadata.\"", ")", "self", ".", "queue_metadata", ".", "sync", "(", ")", "self", ".", "_last_sync", "=", "datetime", ".", "now", "(", ")", "self", ".", "_opcount", "=", "0", "else", ":", "self", ".", "log", ".", "debug", "(", "\"NOT synchronizing queue metadata.\"", ")" ]
Shortcut for getting the previous and next passage identifier
def siblingsId ( self ) : if self . _next_id is False or self . _prev_id is False : self . _prev_id , self . _next_id = self . getPrevNextUrn ( reference = self . urn . reference ) return self . _prev_id , self . _next_id
7,486
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L443-L451
[ "def", "update_cluster", "(", "cluster_ref", ",", "cluster_spec", ")", ":", "cluster_name", "=", "get_managed_object_name", "(", "cluster_ref", ")", "log", ".", "trace", "(", "'Updating cluster \\'%s\\''", ",", "cluster_name", ")", "try", ":", "task", "=", "cluster_ref", ".", "ReconfigureComputeResource_Task", "(", "cluster_spec", ",", "modify", "=", "True", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "wait_for_task", "(", "task", ",", "cluster_name", ",", "'ClusterUpdateTask'", ")" ]
Given self . resource split information from the CTS API
def _parse ( self ) : self . response = self . resource self . resource = self . resource . xpath ( "//ti:passage/tei:TEI" , namespaces = XPATH_NAMESPACES ) [ 0 ] self . _prev_id , self . _next_id = _SharedMethod . prevnext ( self . response ) if not self . citation . is_set ( ) and len ( self . resource . xpath ( "//ti:citation" , namespaces = XPATH_NAMESPACES ) ) : self . citation = CtsCollection . XmlCtsCitation . ingest ( self . response , xpath = ".//ti:citation[not(ancestor::ti:citation)]" )
7,487
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/resources/texts/remote/cts.py#L453-L467
[ "def", "clean_pip_env", "(", ")", "->", "Generator", "[", "None", ",", "None", ",", "None", "]", ":", "require_venv", "=", "os", ".", "environ", ".", "pop", "(", "PIP_REQUIRE_VIRTUALENV", ",", "None", ")", "try", ":", "yield", "finally", ":", "if", "require_venv", "is", "not", "None", ":", "os", ".", "environ", "[", "PIP_REQUIRE_VIRTUALENV", "]", "=", "require_venv" ]
Gets a authorization token for session reuse .
def get_user_token ( self ) : headers = { 'User-Agent' : self . user_agent ( ) , 'Host' : self . domain ( ) , 'Accept' : '*/*' , } headers . update ( self . headers ( ) ) r = requests . get ( self . portals_url ( ) + '/users/_this/token' , headers = headers , auth = self . auth ( ) ) if HTTP_STATUS . OK == r . status_code : return r . text else : print ( "get_user_token: Something went wrong: <{0}>: {1}" . format ( r . status_code , r . reason ) ) r . raise_for_status ( )
7,488
https://github.com/exosite-labs/pyonep/blob/d27b621b00688a542e0adcc01f3e3354c05238a1/pyonep/portals/endpoints.py#L125-L144
[ "def", "setOverlayTransformOverlayRelative", "(", "self", ",", "ulOverlayHandle", ",", "ulOverlayHandleParent", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformOverlayRelative", "pmatParentOverlayToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "ulOverlayHandleParent", ",", "byref", "(", "pmatParentOverlayToOverlayTransform", ")", ")", "return", "result", ",", "pmatParentOverlayToOverlayTransform" ]
Returns device object of newly created device .
def add_device ( self , model , serial ) : device = { 'model' : model , 'vendor' : self . vendor ( ) , 'sn' : serial , 'type' : 'vendor' } headers = { 'User-Agent' : self . user_agent ( ) , } headers . update ( self . headers ( ) ) r = requests . post ( self . portals_url ( ) + '/portals/' + self . portal_id ( ) + '/devices' , data = json . dumps ( device ) , headers = headers , auth = self . auth ( ) ) if HTTP_STATUS . ADDED == r . status_code : # fix the 'meta' to be dictionary instead of string device_obj = r . json ( ) return dictify_device_meta ( device_obj ) else : print ( "add_device: Something went wrong: <{0}>: {1}" . format ( r . status_code , r . reason ) ) r . raise_for_status ( )
7,489
https://github.com/exosite-labs/pyonep/blob/d27b621b00688a542e0adcc01f3e3354c05238a1/pyonep/portals/endpoints.py#L212-L242
[ "def", "amazon_s3_url", "(", "self", ",", "sat", ",", "band", ")", ":", "if", "band", "!=", "'MTL'", ":", "filename", "=", "'%s_B%s.TIF'", "%", "(", "sat", "[", "'scene'", "]", ",", "band", ")", "else", ":", "filename", "=", "'%s_%s.txt'", "%", "(", "sat", "[", "'scene'", "]", ",", "band", ")", "return", "url_builder", "(", "[", "self", ".", "s3", ",", "sat", "[", "'sat'", "]", ",", "sat", "[", "'path'", "]", ",", "sat", "[", "'row'", "]", ",", "sat", "[", "'scene'", "]", ",", "filename", "]", ")" ]
Implements the Update device Portals API .
def update_portal ( self , portal_obj ) : headers = { 'User-Agent' : self . user_agent ( ) , } headers . update ( self . headers ( ) ) r = requests . put ( self . portals_url ( ) + '/portals/' + self . portal_id ( ) , data = json . dumps ( portal_obj ) , headers = headers , auth = self . auth ( ) ) if HTTP_STATUS . OK == r . status_code : return r . json ( ) else : print ( "update_portal: Something went wrong: <{0}>: {1}" . format ( r . status_code , r . reason ) ) r . raise_for_status ( )
7,490
https://github.com/exosite-labs/pyonep/blob/d27b621b00688a542e0adcc01f3e3354c05238a1/pyonep/portals/endpoints.py#L272-L294
[ "def", "from_dict", "(", "cls", ",", "dictionary", ")", ":", "cookbooks", "=", "set", "(", ")", "sources", "=", "set", "(", ")", "other", "=", "set", "(", ")", "# put these in order", "groups", "=", "[", "sources", ",", "cookbooks", ",", "other", "]", "for", "key", ",", "val", "in", "dictionary", ".", "items", "(", ")", ":", "if", "key", "==", "'cookbook'", ":", "cookbooks", ".", "update", "(", "{", "cls", ".", "cookbook_statement", "(", "cbn", ",", "meta", ")", "for", "cbn", ",", "meta", "in", "val", ".", "items", "(", ")", "}", ")", "elif", "key", "==", "'source'", ":", "sources", ".", "update", "(", "{", "\"source '%s'\"", "%", "src", "for", "src", "in", "val", "}", ")", "elif", "key", "==", "'metadata'", ":", "other", ".", "add", "(", "'metadata'", ")", "body", "=", "''", "for", "group", "in", "groups", ":", "if", "group", ":", "body", "+=", "'\\n'", "body", "+=", "'\\n'", ".", "join", "(", "group", ")", "return", "cls", ".", "from_string", "(", "body", ")" ]
Retrieve the device object for a given RID .
def get_device ( self , rid ) : headers = { 'User-Agent' : self . user_agent ( ) , 'Content-Type' : self . content_type ( ) } headers . update ( self . headers ( ) ) url = self . portals_url ( ) + '/devices/' + rid # print("URL: {0}".format(url)) r = requests . get ( url , headers = headers , auth = self . auth ( ) ) if HTTP_STATUS . OK == r . status_code : # fix the 'meta' to be dictionary instead of string device_obj = r . json ( ) # device_obj['info']['description']['meta'] = \ # json.loads(device_obj['info']['description']['meta']) return device_obj else : print ( "get_device: Something went wrong: <{0}>: {1}" . format ( r . status_code , r . reason ) ) r . raise_for_status ( )
7,491
https://github.com/exosite-labs/pyonep/blob/d27b621b00688a542e0adcc01f3e3354c05238a1/pyonep/portals/endpoints.py#L296-L324
[ "def", "_get_manifest_list", "(", "self", ",", "image", ")", ":", "if", "image", "in", "self", ".", "manifest_list_cache", ":", "return", "self", ".", "manifest_list_cache", "[", "image", "]", "manifest_list", "=", "get_manifest_list", "(", "image", ",", "image", ".", "registry", ",", "insecure", "=", "self", ".", "parent_registry_insecure", ",", "dockercfg_path", "=", "self", ".", "parent_registry_dockercfg_path", ")", "if", "'@sha256:'", "in", "str", "(", "image", ")", "and", "not", "manifest_list", ":", "# we want to adjust the tag only for manifest list fetching", "image", "=", "image", ".", "copy", "(", ")", "try", ":", "config_blob", "=", "get_config_from_registry", "(", "image", ",", "image", ".", "registry", ",", "image", ".", "tag", ",", "insecure", "=", "self", ".", "parent_registry_insecure", ",", "dockercfg_path", "=", "self", ".", "parent_registry_dockercfg_path", ")", "except", "(", "HTTPError", ",", "RetryError", ",", "Timeout", ")", "as", "ex", ":", "self", ".", "log", ".", "warning", "(", "'Unable to fetch config for %s, got error %s'", ",", "image", ",", "ex", ".", "response", ".", "status_code", ")", "raise", "RuntimeError", "(", "'Unable to fetch config for base image'", ")", "release", "=", "config_blob", "[", "'config'", "]", "[", "'Labels'", "]", "[", "'release'", "]", "version", "=", "config_blob", "[", "'config'", "]", "[", "'Labels'", "]", "[", "'version'", "]", "docker_tag", "=", "\"%s-%s\"", "%", "(", "version", ",", "release", ")", "image", ".", "tag", "=", "docker_tag", "manifest_list", "=", "get_manifest_list", "(", "image", ",", "image", ".", "registry", ",", "insecure", "=", "self", ".", "parent_registry_insecure", ",", "dockercfg_path", "=", "self", ".", "parent_registry_dockercfg_path", ")", "self", ".", "manifest_list_cache", "[", "image", "]", "=", "manifest_list", "return", "self", ".", "manifest_list_cache", "[", "image", "]" ]
Implements the Get Multiple Devices API .
def get_multiple_devices ( self , rids ) : headers = { 'User-Agent' : self . user_agent ( ) , 'Content-Type' : self . content_type ( ) } headers . update ( self . headers ( ) ) url = self . portals_url ( ) + '/users/_this/devices/' + str ( rids ) . replace ( "'" , "" ) . replace ( ' ' , '' ) # print("URL: {0}".format(url)) r = requests . get ( url , headers = headers , auth = self . auth ( ) ) if HTTP_STATUS . OK == r . status_code : # TODO: loop through all rids and fix 'meta' to be dict like add_device and get_device do return r . json ( ) else : print ( "get_multiple_devices: Something went wrong: <{0}>: {1}" . format ( r . status_code , r . reason ) ) r . raise_for_status ( )
7,492
https://github.com/exosite-labs/pyonep/blob/d27b621b00688a542e0adcc01f3e3354c05238a1/pyonep/portals/endpoints.py#L326-L352
[ "def", "get_url_path", "(", "self", ",", "basedir", ",", "original_basename", ",", "ext", ",", "name", ",", "obscure", "=", "True", ")", ":", "try", ":", "hash", "=", "hashlib", ".", "sha1", "(", "smart_str", "(", "name", ")", ")", ".", "hexdigest", "(", ")", "except", "TypeError", ":", "hash", "=", "hashlib", ".", "sha1", "(", "smart_str", "(", "name", ")", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", "# figure out where the watermark would be saved on the filesystem", "if", "obscure", "is", "True", ":", "logger", ".", "debug", "(", "'Obscuring original image name: %s => %s'", "%", "(", "name", ",", "hash", ")", ")", "url_path", "=", "os", ".", "path", ".", "join", "(", "basedir", ",", "hash", "+", "ext", ")", "else", ":", "logger", ".", "debug", "(", "'Not obscuring original image name.'", ")", "url_path", "=", "os", ".", "path", ".", "join", "(", "basedir", ",", "hash", ",", "original_basename", "+", "ext", ")", "# make sure the destination directory exists", "try", ":", "fpath", "=", "self", ".", "_get_filesystem_path", "(", "url_path", ")", "os", ".", "makedirs", "(", "os", ".", "path", ".", "dirname", "(", "fpath", ")", ")", "except", "OSError", "as", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "EEXIST", ":", "pass", "# not to worry, directory exists", "else", ":", "logger", ".", "error", "(", "'Error creating path: %s'", "%", "traceback", ".", "format_exc", "(", ")", ")", "raise", "else", ":", "logger", ".", "debug", "(", "'Created directory: %s'", "%", "os", ".", "path", ".", "dirname", "(", "fpath", ")", ")", "return", "url_path" ]
calculate thermal pressure for Dorogokupets 2015 EOS
def dorogokupets2015_pth ( v , temp , v0 , gamma0 , gamma_inf , beta , theta01 , m1 , theta02 , m2 , n , z , t_ref = 300. , three_r = 3. * constants . R ) : # x = v / v0 # a = a0 * np.power(x, m) v_mol = vol_uc2mol ( v , z ) gamma = altshuler_grun ( v , v0 , gamma0 , gamma_inf , beta ) theta1 = altshuler_debyetemp ( v , v0 , gamma0 , gamma_inf , beta , theta01 ) theta2 = altshuler_debyetemp ( v , v0 , gamma0 , gamma_inf , beta , theta02 ) if isuncertainties ( [ v , temp , v0 , gamma0 , gamma_inf , beta , theta01 , m1 , theta02 , m2 ] ) : term_h1 = m1 / ( m1 + m2 ) * three_r * n * gamma / v_mol * ( theta1 / ( unp . exp ( theta1 / temp ) - 1. ) ) term_h2 = m2 / ( m1 + m2 ) * three_r * n * gamma / v_mol * ( theta2 / ( unp . exp ( theta2 / temp ) - 1. ) ) term_h1_ref = m1 / ( m1 + m2 ) * three_r * n * gamma / v_mol * ( theta1 / ( unp . exp ( theta1 / t_ref ) - 1. ) ) term_h2_ref = m2 / ( m1 + m2 ) * three_r * n * gamma / v_mol * ( theta2 / ( unp . exp ( theta2 / t_ref ) - 1. ) ) else : term_h1 = m1 / ( m1 + m2 ) * three_r * n * gamma / v_mol * ( theta1 / ( np . exp ( theta1 / temp ) - 1. ) ) term_h2 = m2 / ( m1 + m2 ) * three_r * n * gamma / v_mol * ( theta2 / ( np . exp ( theta2 / temp ) - 1. ) ) term_h1_ref = m1 / ( m1 + m2 ) * three_r * n * gamma / v_mol * ( theta1 / ( np . exp ( theta1 / t_ref ) - 1. ) ) term_h2_ref = m2 / ( m1 + m2 ) * three_r * n * gamma / v_mol * ( theta2 / ( np . exp ( theta2 / t_ref ) - 1. ) ) p_th = term_h1 * 1.e-9 + term_h2 * 1.e-9 p_th_ref = term_h1_ref * 1.e-9 + term_h2_ref * 1.e-9 return ( p_th - p_th_ref )
7,493
https://github.com/SHDShim/pytheos/blob/be079624405e92fbec60c5ead253eb5917e55237/pytheos/eqn_therm_Dorogokupets2015.py#L9-L59
[ "def", "_roundSlist", "(", "slist", ")", ":", "slist", "[", "-", "1", "]", "=", "60", "if", "slist", "[", "-", "1", "]", ">=", "30", "else", "0", "for", "i", "in", "range", "(", "len", "(", "slist", ")", "-", "1", ",", "1", ",", "-", "1", ")", ":", "if", "slist", "[", "i", "]", "==", "60", ":", "slist", "[", "i", "]", "=", "0", "slist", "[", "i", "-", "1", "]", "+=", "1", "return", "slist", "[", ":", "-", "1", "]" ]
Retrieves the main routes of the DTS Collection
def routes ( self ) : if self . _routes : return self . _routes request = requests . get ( self . endpoint ) request . raise_for_status ( ) data = request . json ( ) self . _routes = { "collections" : parse_uri ( data [ "collections" ] , self . endpoint ) , "documents" : parse_uri ( data [ "documents" ] , self . endpoint ) , "navigation" : parse_uri ( data [ "navigation" ] , self . endpoint ) } return self . _routes
7,494
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/retrievers/dts/__init__.py#L76-L103
[ "def", "_watch_progress", "(", "handler", ")", ":", "with", "_tmpdir_scope", "(", ")", "as", "tmpdir", ":", "socket_filename", "=", "os", ".", "path", ".", "join", "(", "tmpdir", ",", "'sock'", ")", "sock", "=", "socket", ".", "socket", "(", "socket", ".", "AF_UNIX", ",", "socket", ".", "SOCK_STREAM", ")", "with", "contextlib", ".", "closing", "(", "sock", ")", ":", "sock", ".", "bind", "(", "socket_filename", ")", "sock", ".", "listen", "(", "1", ")", "child", "=", "gevent", ".", "spawn", "(", "_do_watch_progress", ",", "socket_filename", ",", "sock", ",", "handler", ")", "try", ":", "yield", "socket_filename", "except", ":", "gevent", ".", "kill", "(", "child", ")", "raise" ]
Makes a call on the Collection API
def get_collection ( self , collection_id = None , nav = "children" , page = None ) : return self . call ( "collections" , { "id" : collection_id , "nav" : nav , "page" : page } , defaults = { "id" : None , "nav" : "children" , "page" : 1 } )
7,495
https://github.com/Capitains/MyCapytain/blob/b11bbf6b6ae141fc02be70471e3fbf6907be6593/MyCapytain/retrievers/dts/__init__.py#L105-L126
[ "def", "_SeparateTypes", "(", "self", ",", "metadata_value_pairs", ")", ":", "registry_pairs", "=", "[", "]", "file_pairs", "=", "[", "]", "match_pairs", "=", "[", "]", "for", "metadata", ",", "result", "in", "metadata_value_pairs", ":", "if", "(", "result", ".", "stat_entry", ".", "pathspec", ".", "pathtype", "==", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", ":", "registry_pairs", ".", "append", "(", "(", "metadata", ",", "result", ".", "stat_entry", ")", ")", "else", ":", "file_pairs", ".", "append", "(", "(", "metadata", ",", "result", ")", ")", "match_pairs", ".", "extend", "(", "[", "(", "metadata", ",", "match", ")", "for", "match", "in", "result", ".", "matches", "]", ")", "return", "registry_pairs", ",", "file_pairs", ",", "match_pairs" ]
Create the glance database
def _create_glance_db ( self , root_db_pass , glance_db_pass ) : print red ( env . host_string + ' | Create glance database' ) sudo ( "mysql -uroot -p{0} -e \"CREATE DATABASE glance;\"" . format ( root_db_pass ) , shell = False ) sudo ( "mysql -uroot -p{0} -e \"GRANT ALL PRIVILEGES ON glance.* TO 'glance'@'localhost' IDENTIFIED BY '{1}';\"" . format ( root_db_pass , glance_db_pass ) , shell = False ) sudo ( "mysql -uroot -p{0} -e \"GRANT ALL PRIVILEGES ON glance.* TO 'glance'@'%' IDENTIFIED BY '{1}';\"" . format ( root_db_pass , glance_db_pass ) , shell = False )
7,496
https://github.com/jiasir/playback/blob/58b2a5d669dcfaa8cad50c544a4b068dcacf9b69/playback/glance.py#L74-L82
[ "def", "handle_message", "(", "self", ",", "msg", ")", ":", "if", "msg", ".", "msg_id", "not", "in", "self", ".", "msg_types", ":", "self", ".", "report_message_type", "(", "msg", ")", "self", ".", "msg_types", ".", "add", "(", "msg", ".", "msg_id", ")", "self", ".", "tc", ".", "message", "(", "'inspection'", ",", "typeId", "=", "msg", ".", "msg_id", ",", "message", "=", "msg", ".", "msg", ",", "file", "=", "os", ".", "path", ".", "relpath", "(", "msg", ".", "abspath", ")", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", ",", "line", "=", "str", "(", "msg", ".", "line", ")", ",", "SEVERITY", "=", "TC_SEVERITY", ".", "get", "(", "msg", ".", "category", ")", ")" ]
Return a new CancelToken chaining this and the given token .
def chain ( self , token : 'CancelToken' ) -> 'CancelToken' : if self . loop != token . _loop : raise EventLoopMismatch ( "Chained CancelToken objects must be on the same event loop" ) chain_name = ":" . join ( [ self . name , token . name ] ) chain = CancelToken ( chain_name , loop = self . loop ) chain . _chain . extend ( [ self , token ] ) return chain
7,497
https://github.com/ethereum/asyncio-cancel-token/blob/135395a1a396c50731c03cf570e267c47c612694/cancel_token/token.py#L33-L46
[ "def", "get_hash", "(", "self", ",", "length", "=", "HASH_LENGTH", ")", ":", "data_hash", "=", "\"\"", "if", "not", "self", ".", "data_str", ":", "return", "data_hash", "encoded_data_str", "=", "self", ".", "data_str", "if", "sys", ".", "version_info", ".", "major", "==", "2", ":", "# In Py2, only unicode needs to be encoded.", "if", "isinstance", "(", "self", ".", "data_str", ",", "unicode", ")", ":", "encoded_data_str", "=", "self", ".", "data_str", ".", "encode", "(", "'utf-8'", ")", "else", ":", "# data_str should always be unicode on python 3", "encoded_data_str", "=", "self", ".", "data_str", ".", "encode", "(", "'utf-8'", ")", "data_hash", "=", "hashlib", ".", "sha1", "(", "encoded_data_str", ")", ".", "hexdigest", "(", ")", "return", "data_hash", "[", ":", "length", "]" ]
Return the token which was triggered .
def triggered_token ( self ) -> 'CancelToken' : if self . _triggered . is_set ( ) : return self for token in self . _chain : if token . triggered : # Use token.triggered_token here to make the lookup recursive as self._chain may # contain other chains. return token . triggered_token return None
7,498
https://github.com/ethereum/asyncio-cancel-token/blob/135395a1a396c50731c03cf570e267c47c612694/cancel_token/token.py#L55-L68
[ "def", "setOverlayTransformOverlayRelative", "(", "self", ",", "ulOverlayHandle", ",", "ulOverlayHandleParent", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformOverlayRelative", "pmatParentOverlayToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "ulOverlayHandleParent", ",", "byref", "(", "pmatParentOverlayToOverlayTransform", ")", ")", "return", "result", ",", "pmatParentOverlayToOverlayTransform" ]
Return True or False whether this token has been triggered .
def triggered ( self ) -> bool : if self . _triggered . is_set ( ) : return True return any ( token . triggered for token in self . _chain )
7,499
https://github.com/ethereum/asyncio-cancel-token/blob/135395a1a396c50731c03cf570e267c47c612694/cancel_token/token.py#L71-L77
[ "def", "_check_rest_version", "(", "self", ",", "version", ")", ":", "version", "=", "str", "(", "version", ")", "if", "version", "not", "in", "self", ".", "supported_rest_versions", ":", "msg", "=", "\"Library is incompatible with REST API version {0}\"", "raise", "ValueError", "(", "msg", ".", "format", "(", "version", ")", ")", "array_rest_versions", "=", "self", ".", "_list_available_rest_versions", "(", ")", "if", "version", "not", "in", "array_rest_versions", ":", "msg", "=", "\"Array is incompatible with REST API version {0}\"", "raise", "ValueError", "(", "msg", ".", "format", "(", "version", ")", ")", "return", "LooseVersion", "(", "version", ")" ]