query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Returns the entry point object given a section option pair .
def entrypoint(section, option):
    """Return the entry point object registered under (section, option).

    Raises:
        KeyError: if the option cannot be resolved within the section.
    """
    try:
        available = entrypoints(section)
        return available[option]
    except KeyError:
        raise KeyError('Cannot resolve type "{}" to a recognised vsgen "{}" type.'.format(option, section))
1,900
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/entrypoints.py#L22-L33
[ "def", "_shape_list", "(", "tensor", ")", ":", "# Get statically known shape (may contain None's for unknown dimensions)", "shape", "=", "tensor", ".", "get_shape", "(", ")", ".", "as_list", "(", ")", "# Ensure that the shape values are not None", "dynamic_shape", "=", "tf", ".", "shape", "(", "tensor", ")", "for", "i", "in", "range", "(", "len", "(", "shape", ")", ")", ":", "if", "shape", "[", "i", "]", "is", "None", ":", "shape", "[", "i", "]", "=", "dynamic_shape", "[", "i", "]", "return", "shape" ]
Retrieves any declared information from the given macaroons and returns it as a key - value map . Information is declared with a first party caveat as created by declared_caveat .
def infer_declared(ms, namespace=None):
    """Return declared information from the given macaroons as a key-value map.

    Information is declared with a first-party caveat as created by
    declared_caveat; only caveats with an empty/None location are considered.
    """
    first_party_conditions = [
        cav.caveat_id_bytes.decode('utf-8')
        for m in ms
        for cav in m.caveats
        if cav.location is None or cav.location == ''
    ]
    return infer_declared_from_conditions(first_party_conditions, namespace)
1,901
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/checkers/_declared.py#L15-L32
[ "def", "PopEvents", "(", "self", ")", ":", "event", "=", "self", ".", "PopEvent", "(", ")", "while", "event", ":", "yield", "event", "event", "=", "self", ".", "PopEvent", "(", ")" ]
like infer_declared except that it is passed a set of first party caveat conditions as a list of string rather than a set of macaroons .
def infer_declared_from_conditions(conds, namespace=None):
    """Like infer_declared, but takes first-party caveat condition strings.

    Keys declared with conflicting values are dropped from the result.
    """
    if namespace is None:
        # If we can't resolve the standard namespace, we'll look for
        # bare "declared" caveats, which works for legacy macaroons
        # with no namespace.
        namespace = Namespace()
    prefix = namespace.resolve(STD_NAMESPACE)
    if prefix is None:
        prefix = ''
    declared = prefix + COND_DECLARED
    info = {}
    conflicting = set()
    for cond in conds:
        try:
            name, rest = parse_caveat(cond)
        except ValueError:
            name, rest = '', ''
        if name != declared:
            continue
        fields = rest.split(' ', 1)
        if len(fields) != 2:
            continue
        key, val = fields
        previous = info.get(key)
        if previous is not None and previous != val:
            conflicting.add(key)
        else:
            info[key] = val
    for key in conflicting:
        del info[key]
    return info
1,902
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/checkers/_declared.py#L35-L69
[ "def", "progress", "(", "self", ",", "p", ")", ":", "self", ".", "task_stack", "[", "-", "1", "]", "=", "self", ".", "task_stack", "[", "-", "1", "]", ".", "_replace", "(", "progress", "=", "p", ")", "self", ".", "progress_report", "(", ")" ]
Injects functions before the activation routine of child classes gets called
def _pre_activate_injection(self):
    """Inject setup steps before a child class's activation routine runs."""
    plugin_classes = self.app.plugins.classes
    cls = self.__class__
    # Make sure this plugin's class is registered and available on the
    # application level under application.plugins.classes, so the class
    # can be reused for *new* plugins.
    if not plugin_classes.exist(cls.__name__):
        plugin_classes.register([cls])
    self._load_needed_plugins()
    self.app.signals.send("plugin_activate_pre", self)
1,903
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_base_pattern.py#L135-L146
[ "def", "load_toml_rest_api_config", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "LOGGER", ".", "info", "(", "\"Skipping rest api loading from non-existent config file: %s\"", ",", "filename", ")", "return", "RestApiConfig", "(", ")", "LOGGER", ".", "info", "(", "\"Loading rest api information from config: %s\"", ",", "filename", ")", "try", ":", "with", "open", "(", "filename", ")", "as", "fd", ":", "raw_config", "=", "fd", ".", "read", "(", ")", "except", "IOError", "as", "e", ":", "raise", "RestApiConfigurationError", "(", "\"Unable to load rest api configuration file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "toml_config", "=", "toml", ".", "loads", "(", "raw_config", ")", "invalid_keys", "=", "set", "(", "toml_config", ".", "keys", "(", ")", ")", ".", "difference", "(", "[", "'bind'", ",", "'connect'", ",", "'timeout'", ",", "'opentsdb_db'", ",", "'opentsdb_url'", ",", "'opentsdb_username'", ",", "'opentsdb_password'", ",", "'client_max_size'", "]", ")", "if", "invalid_keys", ":", "raise", "RestApiConfigurationError", "(", "\"Invalid keys in rest api config: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted", "(", "list", "(", "invalid_keys", ")", ")", ")", ")", ")", "config", "=", "RestApiConfig", "(", "bind", "=", "toml_config", ".", "get", "(", "\"bind\"", ",", "None", ")", ",", "connect", "=", "toml_config", ".", "get", "(", "'connect'", ",", "None", ")", ",", "timeout", "=", "toml_config", ".", "get", "(", "'timeout'", ",", "None", ")", ",", "opentsdb_url", "=", "toml_config", ".", "get", "(", "'opentsdb_url'", ",", "None", ")", ",", "opentsdb_db", "=", "toml_config", ".", "get", "(", "'opentsdb_db'", ",", "None", ")", ",", "opentsdb_username", "=", "toml_config", ".", "get", "(", "'opentsdb_username'", ",", "None", ")", ",", "opentsdb_password", "=", "toml_config", ".", "get", "(", "'opentsdb_password'", ",", "None", ")", ",", "client_max_size", "=", "toml_config", 
".", "get", "(", "'client_max_size'", ",", "None", ")", ")", "return", "config" ]
Registers a new signal . Only registered signals are allowed to be send .
def register(self, signal, description):
    """Register a new signal for this plugin.

    Only registered signals are allowed to be sent.
    """
    app_signals = self.__app.signals
    return app_signals.register(signal, self._plugin, description)
1,904
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_base_pattern.py#L248-L257
[ "def", "_load", "(", "self", ",", "keyframe", "=", "True", ")", ":", "if", "self", ".", "_cached", ":", "return", "pages", "=", "self", ".", "pages", "if", "not", "pages", ":", "return", "if", "not", "self", ".", "_indexed", ":", "self", ".", "_seek", "(", "-", "1", ")", "if", "not", "self", ".", "_cache", ":", "return", "fh", "=", "self", ".", "parent", ".", "filehandle", "if", "keyframe", "is", "not", "None", ":", "keyframe", "=", "self", ".", "_keyframe", "for", "i", ",", "page", "in", "enumerate", "(", "pages", ")", ":", "if", "isinstance", "(", "page", ",", "inttypes", ")", ":", "fh", ".", "seek", "(", "page", ")", "page", "=", "self", ".", "_tiffpage", "(", "self", ".", "parent", ",", "index", "=", "i", ",", "keyframe", "=", "keyframe", ")", "pages", "[", "i", "]", "=", "page", "self", ".", "_cached", "=", "True" ]
Returns a single signal or a dictionary of signals for this plugin .
def get(self, signal=None):
    """Return a single signal or a dictionary of signals for this plugin."""
    app_signals = self.__app.signals
    return app_signals.get(signal, self._plugin)
1,905
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_base_pattern.py#L294-L298
[ "def", "post_log_artifacts", "(", "job_log", ")", ":", "logger", ".", "debug", "(", "\"Downloading/parsing log for log %s\"", ",", "job_log", ".", "id", ")", "try", ":", "artifact_list", "=", "extract_text_log_artifacts", "(", "job_log", ")", "except", "LogSizeException", "as", "e", ":", "job_log", ".", "update_status", "(", "JobLog", ".", "SKIPPED_SIZE", ")", "logger", ".", "warning", "(", "'Skipping parsing log for %s: %s'", ",", "job_log", ".", "id", ",", "e", ")", "return", "except", "Exception", "as", "e", ":", "job_log", ".", "update_status", "(", "JobLog", ".", "FAILED", ")", "# Unrecoverable http error (doesn't exist or permission denied).", "# Apparently this can happen somewhat often with taskcluster if", "# the job fails (bug 1154248), so just warn rather than raising,", "# to prevent the noise/load from retrying.", "if", "isinstance", "(", "e", ",", "HTTPError", ")", "and", "e", ".", "response", ".", "status_code", "in", "(", "403", ",", "404", ")", ":", "logger", ".", "warning", "(", "\"Unable to retrieve log for %s: %s\"", ",", "job_log", ".", "id", ",", "e", ")", "return", "logger", ".", "error", "(", "\"Failed to download/parse log for %s: %s\"", ",", "job_log", ".", "id", ",", "e", ")", "raise", "try", ":", "serialized_artifacts", "=", "serialize_artifact_json_blobs", "(", "artifact_list", ")", "store_job_artifacts", "(", "serialized_artifacts", ")", "job_log", ".", "update_status", "(", "JobLog", ".", "PARSED", ")", "logger", ".", "debug", "(", "\"Stored artifact for %s %s\"", ",", "job_log", ".", "job", ".", "repository", ".", "name", ",", "job_log", ".", "job", ".", "id", ")", "except", "Exception", "as", "e", ":", "logger", ".", "error", "(", "\"Failed to store parsed artifact for %s: %s\"", ",", "job_log", ".", "id", ",", "e", ")", "raise" ]
Returns a single receiver or a dictionary of receivers for this plugin .
def get_receiver(self, receiver=None):
    """Return a single receiver or a dictionary of receivers for this plugin."""
    app_signals = self.__app.signals
    return app_signals.get_receiver(receiver, self._plugin)
1,906
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_base_pattern.py#L300-L304
[ "def", "_makeComplementTable", "(", "complementData", ")", ":", "table", "=", "list", "(", "range", "(", "256", ")", ")", "for", "_from", ",", "to", "in", "complementData", ".", "items", "(", ")", ":", "table", "[", "ord", "(", "_from", "[", "0", "]", ".", "lower", "(", ")", ")", "]", "=", "ord", "(", "to", "[", "0", "]", ".", "lower", "(", ")", ")", "table", "[", "ord", "(", "_from", "[", "0", "]", ".", "upper", "(", ")", ")", "]", "=", "ord", "(", "to", "[", "0", "]", ".", "upper", "(", ")", ")", "return", "''", ".", "join", "(", "map", "(", "chr", ",", "table", ")", ")" ]
Check attributes .
def validate(mcs, bases, attributes):
    """Run all attribute checks; the root (object-based) class is skipped."""
    if bases[0] is object:
        # Nothing to validate on the base class itself.
        return None
    for check in (mcs.check_model_cls,
                  mcs.check_include_exclude,
                  mcs.check_properties):
        check(attributes)
1,907
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/views.py#L18-L24
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Return tuple of names of defined properties .
def get_properties(attributes):
    """Return a list of names of attributes defined as properties."""
    names = []
    for key, value in six.iteritems(attributes):
        if isinstance(value, property):
            names.append(key)
    return names
1,908
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/views.py#L76-L83
[ "def", "wrap_conn", "(", "conn_func", ")", ":", "def", "call", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "conn", "=", "conn_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "cursor_func", "=", "getattr", "(", "conn", ",", "CURSOR_WRAP_METHOD", ")", "wrapped", "=", "wrap_cursor", "(", "cursor_func", ")", "setattr", "(", "conn", ",", "cursor_func", ".", "__name__", ",", "wrapped", ")", "return", "conn", "except", "Exception", ":", "# pragma: NO COVER", "logging", ".", "warning", "(", "'Fail to wrap conn, mysql not traced.'", ")", "return", "conn_func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "call" ]
Check whether intersections exist .
def check_properties(mcs, attributes):
    """Raise AttributeError if __include__/__exclude__ list defined properties."""
    include, exclude = mcs.get_prepared_include_exclude(attributes)
    listed = include if include else exclude
    overlap = list(set(mcs.get_properties(attributes)).intersection(listed))
    if overlap:
        attr_name = '__include__' if include else '__exclude__'
        raise AttributeError(
            "It is not allowed to mention already defined properties: "
            "{0} in {1} attributes.".format(", ".join(overlap), attr_name))
    return None
1,909
https://github.com/ets-labs/python-domain-models/blob/7de1816ba0338f20fdb3e0f57fad0ffd5bea13f9/domain_models/views.py#L86-L103
[ "def", "promote_owner", "(", "self", ",", "stream_id", ",", "user_id", ")", ":", "req_hook", "=", "'pod/v1/room/'", "+", "stream_id", "+", "'/membership/promoteOwner'", "req_args", "=", "'{ \"id\": %s }'", "%", "user_id", "status_code", ",", "response", "=", "self", ".", "__rest__", ".", "POST_query", "(", "req_hook", ",", "req_args", ")", "self", ".", "logger", ".", "debug", "(", "'%s: %s'", "%", "(", "status_code", ",", "response", ")", ")", "return", "status_code", ",", "response" ]
Event Handler when a file is deleted
def on_deleted(self, event):
    """Event handler: broadcast a message when a file is deleted."""
    payload = {
        'filepath': event.src_path,
        'is_directory': event.is_directory,
        'dirpath': os.path.dirname(event.src_path),
    }
    message = BroadcastMessage(key='filesystem:file_deleted', data=payload)
    BroadcastManager.broadcast(message)
1,910
https://github.com/SandstoneHPC/sandstone-ide/blob/7a47947fb07281c3e3018042863dc67e7e56dc04/sandstone/lib/filesystem/filewatcher.py#L27-L39
[ "def", "from_cfunits", "(", "cls", ",", "units", ")", "->", "'Date'", ":", "try", ":", "string", "=", "units", "[", "units", ".", "find", "(", "'since'", ")", "+", "6", ":", "]", "idx", "=", "string", ".", "find", "(", "'.'", ")", "if", "idx", "!=", "-", "1", ":", "jdx", "=", "None", "for", "jdx", ",", "char", "in", "enumerate", "(", "string", "[", "idx", "+", "1", ":", "]", ")", ":", "if", "not", "char", ".", "isnumeric", "(", ")", ":", "break", "if", "char", "!=", "'0'", ":", "raise", "ValueError", "(", "'No other decimal fraction of a second '", "'than \"0\" allowed.'", ")", "else", ":", "if", "jdx", "is", "None", ":", "jdx", "=", "idx", "+", "1", "else", ":", "jdx", "+=", "1", "string", "=", "f'{string[:idx]}{string[idx+jdx+1:]}'", "return", "cls", "(", "string", ")", "except", "BaseException", ":", "objecttools", ".", "augment_excmessage", "(", "f'While trying to parse the date of the NetCDF-CF \"units\" '", "f'string `{units}`'", ")" ]
Loads agent authentication information from the specified content string as read from an agents file . The returned information is suitable for passing as an argument to the AgentInteractor constructor .
def read_auth_info(agent_file_content):
    """Load agent authentication info from agents-file content.

    The returned AuthInfo is suitable for passing to the AgentInteractor
    constructor.

    Raises:
        AgentFileFormatError: if the content cannot be parsed.
    """
    try:
        data = json.loads(agent_file_content)
        key = bakery.PrivateKey.deserialize(data['key']['private'])
        agents = [Agent(url=a['url'], username=a['username'])
                  for a in data.get('agents', [])]
        return AuthInfo(key=key, agents=agents)
    except (KeyError, ValueError, TypeError) as e:
        raise AgentFileFormatError('invalid agent file', e)
1,911
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/httpbakery/agent/_agent.py#L37-L60
[ "def", "future_set_exception_unless_cancelled", "(", "future", ":", "\"Union[futures.Future[_T], Future[_T]]\"", ",", "exc", ":", "BaseException", ")", "->", "None", ":", "if", "not", "future", ".", "cancelled", "(", ")", ":", "future", ".", "set_exception", "(", "exc", ")", "else", ":", "app_log", ".", "error", "(", "\"Exception after Future was cancelled\"", ",", "exc_info", "=", "exc", ")" ]
Implement Interactor . interact by obtaining a macaroon from the discharger discharging it with the local private key using the discharged macaroon as a discharge token
def interact(self, client, location, interaction_required_err):
    """Implement Interactor.interact for the agent protocol.

    Obtains a macaroon from the discharger, discharges it with the
    local private key, and returns the result as a discharge token.
    """
    info = interaction_required_err.interaction_method('agent', InteractionInfo)
    if info.login_url is None or info.login_url == '':
        raise httpbakery.InteractionError(
            'no login-url field found in agent interaction method')
    agent = self._find_agent(location)
    base = location if location.endswith('/') else location + '/'
    login_url = urljoin(base, info.login_url)
    params = {
        'username': agent.username,
        'public-key': str(self._auth_info.key.public_key),
    }
    resp = requests.get(login_url, params=params, auth=client.auth())
    if resp.status_code != 200:
        raise httpbakery.InteractionError(
            'cannot acquire agent macaroon: {} {}'.format(
                resp.status_code, resp.text))
    macaroon_dict = resp.json().get('macaroon')
    if macaroon_dict is None:
        raise httpbakery.InteractionError('no macaroon in response')
    macaroon = bakery.Macaroon.from_dict(macaroon_dict)
    discharges = bakery.discharge_all(macaroon, None, self._auth_info.key)
    token = bytearray()
    for m in discharges:
        token.extend(utils.b64decode(m.serialize()))
    return httpbakery.DischargeToken(kind='agent', value=bytes(token))
1,912
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/httpbakery/agent/_agent.py#L98-L130
[ "def", "expand_filename_pattern", "(", "self", ",", "pattern", ",", "base_dir", ",", "sourcefile", "=", "None", ")", ":", "# replace vars like ${benchmark_path},", "# with converting to list and back, we can use the function 'substitute_vars()'", "expandedPattern", "=", "substitute_vars", "(", "[", "pattern", "]", ",", "self", ",", "sourcefile", ")", "assert", "len", "(", "expandedPattern", ")", "==", "1", "expandedPattern", "=", "expandedPattern", "[", "0", "]", "if", "expandedPattern", "!=", "pattern", ":", "logging", ".", "debug", "(", "\"Expanded variables in expression %r to %r.\"", ",", "pattern", ",", "expandedPattern", ")", "fileList", "=", "util", ".", "expand_filename_pattern", "(", "expandedPattern", ",", "base_dir", ")", "# sort alphabetical,", "fileList", ".", "sort", "(", ")", "if", "not", "fileList", ":", "logging", ".", "warning", "(", "\"No files found matching %r.\"", ",", "pattern", ")", "return", "fileList" ]
Implement LegacyInteractor . legacy_interact by obtaining the discharge macaroon using the client s private key
def legacy_interact(self, client, location, visit_url):
    """Implement LegacyInteractor.legacy_interact.

    Obtains the discharge macaroon from *visit_url* using the client's
    private key.
    """
    agent = self._find_agent(location)
    # Shallow-copy the client so that we don't unexpectedly side-effect
    # it by changing the key. Another possibility might be to
    # set up agent authentication differently, in such a way that
    # we're sure that client.key is the same as self._auth_info.key.
    client = copy.copy(client)
    client.key = self._auth_info.key
    body = {
        'username': agent.username,
        'public_key': str(self._auth_info.key.public_key),
    }
    resp = client.request(method='POST', url=visit_url, json=body)
    if resp.status_code != 200:
        raise httpbakery.InteractionError(
            'cannot acquire agent macaroon from {}: {} (response body: {!r})'.format(
                visit_url, resp.status_code, resp.text))
    if not resp.json().get('agent_login', False):
        raise httpbakery.InteractionError('agent login failed')
1,913
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/httpbakery/agent/_agent.py#L143-L166
[ "def", "expand_filename_pattern", "(", "self", ",", "pattern", ",", "base_dir", ",", "sourcefile", "=", "None", ")", ":", "# replace vars like ${benchmark_path},", "# with converting to list and back, we can use the function 'substitute_vars()'", "expandedPattern", "=", "substitute_vars", "(", "[", "pattern", "]", ",", "self", ",", "sourcefile", ")", "assert", "len", "(", "expandedPattern", ")", "==", "1", "expandedPattern", "=", "expandedPattern", "[", "0", "]", "if", "expandedPattern", "!=", "pattern", ":", "logging", ".", "debug", "(", "\"Expanded variables in expression %r to %r.\"", ",", "pattern", ",", "expandedPattern", ")", "fileList", "=", "util", ".", "expand_filename_pattern", "(", "expandedPattern", ",", "base_dir", ")", "# sort alphabetical,", "fileList", ".", "sort", "(", ")", "if", "not", "fileList", ":", "logging", ".", "warning", "(", "\"No files found matching %r.\"", ",", "pattern", ")", "return", "fileList" ]
Returns the minimum time of any time - before caveats found in the given list or None if no such caveats were found .
def expiry_time(ns, cavs):
    """Return the minimum time of any time-before caveat in *cavs*.

    Returns None if no such caveats were found.
    """
    prefix = ns.resolve(STD_NAMESPACE)
    cond_name = condition_with_prefix(prefix, COND_TIME_BEFORE)
    earliest = None
    for cav in cavs:
        if not cav.first_party():
            continue
        name, rest = parse_caveat(cav.caveat_id_bytes.decode('utf-8'))
        if name != cond_name:
            continue
        try:
            when = pyrfc3339.parse(rest, utc=True).replace(tzinfo=None)
        except ValueError:
            # Malformed timestamps are skipped.
            continue
        if earliest is None or when < earliest:
            earliest = when
    return earliest
1,914
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/checkers/_time.py#L40-L67
[ "def", "reinverted", "(", "n", ",", "r", ")", ":", "result", "=", "0", "r", "=", "1", "<<", "(", "r", "-", "1", ")", "while", "n", ":", "if", "not", "n", "&", "1", ":", "result", "|=", "r", "r", ">>=", "1", "n", ">>=", "1", "if", "r", ":", "result", "|=", "(", "r", "<<", "1", ")", "-", "1", "return", "result" ]
Replace multiple strings in a text .
def replace_all(text, replace_dict):
    """Return *text* with every key of *replace_dict* replaced by its value.

    Replacements are applied in the dict's iteration order, so later
    replacements see the result of earlier ones.
    """
    for old, new in replace_dict.items():
        text = text.replace(old, new)
    return text
1,915
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/tools.py#L16-L36
[ "def", "aux", "(", "self", ",", "aux", ")", ":", "if", "aux", "==", "self", ".", "_aux", ":", "return", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "if", "aux", "is", "not", "None", ":", "self", ".", "_aux", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "aux", ",", "self", ".", "_project", ")", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: aux port set to {port}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "port", "=", "aux", ")", ")" ]
Map the function on param or apply it depending whether param \ is a list or an item .
def map_or_apply(function, param):
    """Map *function* over *param* if it is a list, else apply it once.

    Each call's result is reduced to its first element.  Returns None
    when any result is empty.
    """
    def first_result(value):
        # Take the first item of the iterable returned by function.
        return next(iter(function(value)))

    try:
        if isinstance(param, list):
            return [first_result(item) for item in param]
        return first_result(param)
    except StopIteration:
        return None
1,916
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/tools.py#L39-L54
[ "def", "_merge_meta_data", "(", "cls", ",", "first", ":", "\"HistogramBase\"", ",", "second", ":", "\"HistogramBase\"", ")", "->", "dict", ":", "keys", "=", "set", "(", "first", ".", "_meta_data", ".", "keys", "(", ")", ")", "keys", "=", "keys", ".", "union", "(", "set", "(", "second", ".", "_meta_data", ".", "keys", "(", ")", ")", ")", "return", "{", "key", ":", "(", "first", ".", "_meta_data", ".", "get", "(", "key", ",", "None", ")", "if", "first", ".", "_meta_data", ".", "get", "(", "key", ",", "None", ")", "==", "second", ".", "_meta_data", ".", "get", "(", "key", ",", "None", ")", "else", "None", ")", "for", "key", "in", "keys", "}" ]
Get items from a sequence a batch at a time .
def batch(iterable, size):
    """Yield iterators over successive batches of *size* items.

    The final batch may be shorter than *size*.
    """
    source = iter(iterable)
    while True:
        chunk = islice(source, size)
        try:
            head = next(chunk)
        except StopIteration:
            # Source exhausted: no more batches.
            return
        yield chain([head], chunk)
1,917
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/tools.py#L87-L114
[ "def", "review", "(", "cls", ",", "content", ",", "log", ",", "parent", ",", "window_icon", ")", ":", "# pragma: no cover", "dlg", "=", "DlgReview", "(", "content", ",", "log", ",", "parent", ",", "window_icon", ")", "if", "dlg", ".", "exec_", "(", ")", ":", "return", "dlg", ".", "ui", ".", "edit_main", ".", "toPlainText", "(", ")", ",", "dlg", ".", "ui", ".", "edit_log", ".", "toPlainText", "(", ")", "return", "None", ",", "None" ]
Normalizes string converts to lowercase removes non - alpha characters and converts spaces to hyphens to have nice filenames .
def slugify(value):
    """Normalize *value* so it can be used as a nice filename.

    Decomposes unicode to ASCII, removes characters matched by
    _SLUGIFY_STRIP_RE, and replaces spans matched by
    _SLUGIFY_HYPHENATE_RE with underscores.
    """
    # Python 2/3 compatibility: `unicode` only exists on Python 2.
    try:
        text_type = unicode
    except NameError:
        text_type = str
    if not isinstance(value, text_type):
        value = text_type(value)
    ascii_value = (unicodedata.normalize('NFKD', value)
                   .encode('ascii', 'ignore')
                   .decode('ascii'))
    stripped = text_type(_SLUGIFY_STRIP_RE.sub('', ascii_value).strip())
    return _SLUGIFY_HYPHENATE_RE.sub('_', stripped)
1,918
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/tools.py#L131-L150
[ "def", "set", "(", "self", ",", "type", ",", "offset", ",", "value", ")", ":", "self", ".", "_command_stack", ".", "extend", "(", "[", "'SET'", ",", "type", ",", "offset", ",", "value", "]", ")", "return", "self" ]
Get the citations of a given preprint in plain text .
def get_plaintext_citations(arxiv_id):
    """Return the plaintext citations of a given preprint.

    A preprint may ship several bbl files; citations from all of them
    are aggregated.
    """
    citations = []
    for bbl_file in arxiv.get_bbl(arxiv_id):
        # Fetch the plaintext citations of each bbl file.
        citations.extend(bbl.get_plaintext_citations(bbl_file))
    return citations
1,919
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/citations/repositories/arxiv.py#L9-L28
[ "def", "on_start", "(", "self", ",", "session", ",", "session_context", ")", ":", "session_id", "=", "session", ".", "session_id", "web_registry", "=", "session_context", "[", "'web_registry'", "]", "if", "self", ".", "is_session_id_cookie_enabled", ":", "web_registry", ".", "session_id", "=", "session_id", "logger", ".", "debug", "(", "\"Set SessionID cookie using id: \"", "+", "str", "(", "session_id", ")", ")", "else", ":", "msg", "=", "(", "\"Session ID cookie is disabled. No cookie has been set for \"", "\"new session with id: \"", "+", "str", "(", "session_id", ")", ")", "logger", ".", "debug", "(", "msg", ")" ]
Get the DOIs of the papers cited in a . bbl file .
def get_cited_dois(arxiv_id):
    """Return the DOIs of the papers cited in the preprint's .bbl files."""
    dois = {}
    for bbl_file in arxiv.get_bbl(arxiv_id):
        # Merge the cited DOIs found in each bbl file.
        dois.update(bbl.get_cited_dois(bbl_file))
    return dois
1,920
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/citations/repositories/arxiv.py#L31-L50
[ "def", "reboot", "(", "self", ",", "nodes", "=", "None", ")", ":", "if", "not", "self", ".", "is_connected", "(", ")", ":", "return", "None", "nodes", "=", "nodes", "or", "self", ".", "nodes", "result", "=", "[", "]", "for", "node", "in", "nodes", ":", "if", "node", ".", "state", "==", "'stopped'", ":", "logging", ".", "warning", "(", "'Node %s is \"stopped\" and can not be rebooted.'", ",", "node", ".", "name", ")", "continue", "try", ":", "status", "=", "self", ".", "gce", ".", "reboot_node", "(", "node", ")", "if", "status", ":", "result", ".", "append", "(", "node", ")", "except", "InvalidRequestError", "as", "err", ":", "raise", "ComputeEngineManagerException", "(", "err", ")", "return", "result" ]
Get subcommand options from global parsed arguments .
def get_subcommand_kwargs(mgr, name, namespace):
    """Return (subcommand, kwargs) extracted from globally parsed arguments."""
    subcmd = mgr.get(name)
    all_opts = list(subcmd.args.values()) + list(subcmd.options.values())
    kwargs = {
        opt.dest: getattr(namespace, opt.dest)
        for opt in all_opts
        if hasattr(namespace, opt.dest)
    }
    return (subcmd, kwargs)
1,921
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/main.py#L23-L32
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Parse a plaintext file to get a clean list of plaintext citations . The \ file should have one citation per line .
def get_plaintext_citations(file):
    """Return a clean list of plaintext citations, one per line.

    *file* may be a path to a plaintext file or its raw content.
    """
    # Handle either a path or the content itself.
    if os.path.isfile(file):
        with open(file, 'r') as handle:
            lines = handle.readlines()
    else:
        lines = file.splitlines()
    # Normalize whitespace on every line.
    return [tools.clean_whitespaces(line) for line in lines]
1,922
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/citations/plaintext.py#L20-L37
[ "def", "concat", "(", "self", ",", "operand", ",", "start", "=", "0", ",", "end", "=", "0", ",", "offset", "=", "0", ")", ":", "if", "not", "Gauged", ".", "map_concat", "(", "self", ".", "ptr", ",", "operand", ".", "ptr", ",", "start", ",", "end", ",", "offset", ")", ":", "raise", "MemoryError" ]
Get the DOIs of the papers cited in a plaintext file . The file should \ have one citation per line .
def get_cited_dois(file):
    """Return the DOIs of the papers cited in a plaintext citation list.

    *file* may be a pre-processed list of plaintext citations, a path to
    a plaintext file, or the content of such a file (one citation per
    line).  Returns a dict mapping each citation string to a DOI, or to
    None when no DOI could be found.
    """
    # If file is not a pre-processed list of plaintext citations
    if not isinstance(file, list):
        # It is either a path to a plaintext file or the content of a plaintext
        # file, we need some pre-processing to get a list of citations.
        plaintext_citations = get_plaintext_citations(file)
    else:
        # Else, we passed a list of plaintext citations.
        plaintext_citations = file
    dois = {}
    crossref_queue = []
    # Try to get the DOI directly from the citation
    for citation in plaintext_citations[:]:
        # Some citations already contain a DOI so try to match it directly
        matched_dois = doi.extract_from_text(citation)
        if len(matched_dois) > 0:
            # Add the DOI and go on
            dois[citation] = next(iter(matched_dois))
            continue
        # Same thing for arXiv id
        matched_arxiv = arxiv.extract_from_text(citation)
        if len(matched_arxiv) > 0:
            # Add the associated DOI and go on
            dois[citation] = arxiv.to_doi(next(iter(matched_arxiv)))
            continue
        # If no match found, stack it for next step
        # Note to remove URLs in the citation as the plaintext citations can
        # contain URLs and they are bad for the CrossRef API.
        crossref_queue.append(tools.remove_urls(citation))
    # Do batch with remaining papers, to prevent from the timeout of CrossRef
    for batch in tools.batch(crossref_queue, CROSSREF_MAX_BATCH_SIZE):
        batch = [i for i in batch]
        try:
            # Fetch results from CrossRef
            request = requests.post(CROSSREF_LINKS_API_URL, json=batch)
            for result in request.json()["results"]:
                # Try to get a DOI
                try:
                    dois[result["text"]] = result["doi"]
                except KeyError:
                    # Or set it to None
                    dois[result["text"]] = None
        except (RequestException, ValueError, KeyError):
            # If an exception occurred, set all the DOIs to None for the
            # current batch
            for i in batch:
                dois[i] = None
    return dois
1,923
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/citations/plaintext.py#L40-L103
[ "def", "get_storage", "(", "self", ")", ":", "if", "self", ".", "storage", ":", "return", "self", ".", "storage", "self", ".", "storage", "=", "self", ".", "reconnect_redis", "(", ")", "return", "self", ".", "storage" ]
Check that a given string is a valid ISBN .
def is_valid ( isbn_id ) : return ( ( not isbnlib . notisbn ( isbn_id ) ) and ( isbnlib . get_canonical_isbn ( isbn_id ) == isbn_id or isbnlib . mask ( isbnlib . get_canonical_isbn ( isbn_id ) ) == isbn_id ) )
1,924
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/isbn.py#L14-L49
[ "def", "write_result_stream", "(", "result_stream", ",", "filename_prefix", "=", "None", ",", "results_per_file", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "result_stream", ",", "types", ".", "GeneratorType", ")", ":", "stream", "=", "result_stream", "else", ":", "stream", "=", "result_stream", ".", "stream", "(", ")", "file_time_formatter", "=", "\"%Y-%m-%dT%H_%M_%S\"", "if", "filename_prefix", "is", "None", ":", "filename_prefix", "=", "\"twitter_search_results\"", "if", "results_per_file", ":", "logger", ".", "info", "(", "\"chunking result stream to files with {} tweets per file\"", ".", "format", "(", "results_per_file", ")", ")", "chunked_stream", "=", "partition", "(", "stream", ",", "results_per_file", ",", "pad_none", "=", "True", ")", "for", "chunk", "in", "chunked_stream", ":", "chunk", "=", "filter", "(", "lambda", "x", ":", "x", "is", "not", "None", ",", "chunk", ")", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}_{}.json\"", ".", "format", "(", "filename_prefix", ",", "curr_datetime", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "chunk", ")", "else", ":", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}.json\"", ".", "format", "(", "filename_prefix", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "stream", ")" ]
Extract ISBNs from a text .
def extract_from_text ( text ) : isbns = [ isbnlib . get_canonical_isbn ( isbn ) for isbn in isbnlib . get_isbnlike ( text ) ] return [ i for i in isbns if i is not None ]
1,925
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/isbn.py#L52-L64
[ "def", "login_required", "(", "func", ",", "permission", "=", "None", ")", ":", "@", "wraps", "(", "func", ")", "def", "decorated_function", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "check_token", "(", ")", ":", "return", "login", "(", ")", "elif", "not", "nago", ".", "core", ".", "has_access", "(", "session", ".", "get", "(", "'token'", ")", ")", ":", "return", "http403", "(", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "decorated_function" ]
Get a BibTeX string for the given ISBN .
def get_bibtex ( isbn_identifier ) : # Try to find the BibTeX using associated DOIs bibtex = doi . get_bibtex ( to_doi ( isbn_identifier ) ) if bibtex is None : # In some cases, there are no DOIs for a given ISBN. In this case, try # to fetch bibtex directly from the ISBN, using a combination of # Google Books and worldcat.org results. bibtex = isbnlib . registry . bibformatters [ 'bibtex' ] ( isbnlib . meta ( isbn_identifier , 'default' ) ) return bibtex
1,926
https://github.com/Phyks/libbmc/blob/9ef1a29d2514157d1edd6c13ecbd61b07ae9315e/libbmc/isbn.py#L67-L85
[ "def", "write_result_stream", "(", "result_stream", ",", "filename_prefix", "=", "None", ",", "results_per_file", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "result_stream", ",", "types", ".", "GeneratorType", ")", ":", "stream", "=", "result_stream", "else", ":", "stream", "=", "result_stream", ".", "stream", "(", ")", "file_time_formatter", "=", "\"%Y-%m-%dT%H_%M_%S\"", "if", "filename_prefix", "is", "None", ":", "filename_prefix", "=", "\"twitter_search_results\"", "if", "results_per_file", ":", "logger", ".", "info", "(", "\"chunking result stream to files with {} tweets per file\"", ".", "format", "(", "results_per_file", ")", ")", "chunked_stream", "=", "partition", "(", "stream", ",", "results_per_file", ",", "pad_none", "=", "True", ")", "for", "chunk", "in", "chunked_stream", ":", "chunk", "=", "filter", "(", "lambda", "x", ":", "x", "is", "not", "None", ",", "chunk", ")", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}_{}.json\"", ".", "format", "(", "filename_prefix", ",", "curr_datetime", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "chunk", ")", "else", ":", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}.json\"", ".", "format", "(", "filename_prefix", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "stream", ")" ]
Return options already used in the command line
def used_options ( self ) : for option_str in filter ( lambda c : c . startswith ( '-' ) , self . words ) : for option in list ( self . cmd . options . values ( ) ) : if option_str in option . option_strings : yield option
1,927
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/parser.py#L40-L49
[ "def", "_itemize", "(", "objs", ")", ":", "if", "not", "isinstance", "(", "objs", ",", "collections", ".", "Sequence", ")", ":", "raise", "TypeError", "(", "\"expected a sequence of Function\"", ")", "isseq", "=", "[", "isinstance", "(", "obj", ",", "collections", ".", "Sequence", ")", "for", "obj", "in", "objs", "]", "if", "not", "any", "(", "isseq", ")", ":", "ftype", "=", "None", "for", "obj", "in", "objs", ":", "if", "ftype", "is", "None", ":", "if", "isinstance", "(", "obj", ",", "BinaryDecisionDiagram", ")", ":", "ftype", "=", "BinaryDecisionDiagram", "elif", "isinstance", "(", "obj", ",", "Expression", ")", ":", "ftype", "=", "Expression", "elif", "isinstance", "(", "obj", ",", "TruthTable", ")", ":", "ftype", "=", "TruthTable", "else", ":", "raise", "TypeError", "(", "\"expected valid Function inputs\"", ")", "elif", "not", "isinstance", "(", "obj", ",", "ftype", ")", ":", "raise", "ValueError", "(", "\"expected uniform Function types\"", ")", "return", "list", "(", "objs", ")", ",", "(", "(", "0", ",", "len", "(", "objs", ")", ")", ",", ")", ",", "ftype", "elif", "all", "(", "isseq", ")", ":", "items", "=", "list", "(", ")", "shape", "=", "None", "ftype", "=", "None", "for", "obj", "in", "objs", ":", "_items", ",", "_shape", ",", "_ftype", "=", "_itemize", "(", "obj", ")", "if", "shape", "is", "None", ":", "shape", "=", "_shape", "elif", "shape", "!=", "_shape", ":", "raise", "ValueError", "(", "\"expected uniform farray dimensions\"", ")", "if", "ftype", "is", "None", ":", "ftype", "=", "_ftype", "elif", "ftype", "!=", "_ftype", ":", "raise", "ValueError", "(", "\"expected uniform Function types\"", ")", "items", "+=", "_items", "shape", "=", "(", "(", "0", ",", "len", "(", "objs", ")", ")", ",", ")", "+", "shape", "return", "items", ",", "shape", ",", "ftype", "else", ":", "raise", "ValueError", "(", "\"expected uniform farray dimensions\"", ")" ]
Return options that can be used given the current cmd line
def available_options ( self ) : for option in list ( self . cmd . options . values ( ) ) : if ( option . is_multiple or option not in list ( self . used_options ) ) : yield option
1,928
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/parser.py#L52-L61
[ "def", "_itemize", "(", "objs", ")", ":", "if", "not", "isinstance", "(", "objs", ",", "collections", ".", "Sequence", ")", ":", "raise", "TypeError", "(", "\"expected a sequence of Function\"", ")", "isseq", "=", "[", "isinstance", "(", "obj", ",", "collections", ".", "Sequence", ")", "for", "obj", "in", "objs", "]", "if", "not", "any", "(", "isseq", ")", ":", "ftype", "=", "None", "for", "obj", "in", "objs", ":", "if", "ftype", "is", "None", ":", "if", "isinstance", "(", "obj", ",", "BinaryDecisionDiagram", ")", ":", "ftype", "=", "BinaryDecisionDiagram", "elif", "isinstance", "(", "obj", ",", "Expression", ")", ":", "ftype", "=", "Expression", "elif", "isinstance", "(", "obj", ",", "TruthTable", ")", ":", "ftype", "=", "TruthTable", "else", ":", "raise", "TypeError", "(", "\"expected valid Function inputs\"", ")", "elif", "not", "isinstance", "(", "obj", ",", "ftype", ")", ":", "raise", "ValueError", "(", "\"expected uniform Function types\"", ")", "return", "list", "(", "objs", ")", ",", "(", "(", "0", ",", "len", "(", "objs", ")", ")", ",", ")", ",", "ftype", "elif", "all", "(", "isseq", ")", ":", "items", "=", "list", "(", ")", "shape", "=", "None", "ftype", "=", "None", "for", "obj", "in", "objs", ":", "_items", ",", "_shape", ",", "_ftype", "=", "_itemize", "(", "obj", ")", "if", "shape", "is", "None", ":", "shape", "=", "_shape", "elif", "shape", "!=", "_shape", ":", "raise", "ValueError", "(", "\"expected uniform farray dimensions\"", ")", "if", "ftype", "is", "None", ":", "ftype", "=", "_ftype", "elif", "ftype", "!=", "_ftype", ":", "raise", "ValueError", "(", "\"expected uniform Function types\"", ")", "items", "+=", "_items", "shape", "=", "(", "(", "0", ",", "len", "(", "objs", ")", ")", ",", ")", "+", "shape", "return", "items", ",", "shape", ",", "ftype", "else", ":", "raise", "ValueError", "(", "\"expected uniform farray dimensions\"", ")" ]
Return args already used in the command line
def used_args ( self ) : # get all arguments values from the command line values = [ ] for idx , c in enumerate ( self . words [ 1 : ] ) : if c . startswith ( '-' ) : continue option_str = self . words [ 1 : ] [ idx - 1 ] option = self . get_option ( option_str ) if option is None or not option . need_value : values . append ( ( c , c == self . document . get_word_before_cursor ( WORD = True ) ) ) logger . debug ( "Found args values %s" % values ) # consume values for arg in self . cmd . args . values ( ) : if not values : raise StopIteration if arg . is_multiple : values = [ ] yield arg elif type ( arg . nargs ) is int : for _ in range ( arg . nargs ) : value = values . pop ( 0 ) # not the current argument if value [ 1 ] is False : yield arg if not values : raise StopIteration
1,929
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/parser.py#L64-L94
[ "def", "_itemize", "(", "objs", ")", ":", "if", "not", "isinstance", "(", "objs", ",", "collections", ".", "Sequence", ")", ":", "raise", "TypeError", "(", "\"expected a sequence of Function\"", ")", "isseq", "=", "[", "isinstance", "(", "obj", ",", "collections", ".", "Sequence", ")", "for", "obj", "in", "objs", "]", "if", "not", "any", "(", "isseq", ")", ":", "ftype", "=", "None", "for", "obj", "in", "objs", ":", "if", "ftype", "is", "None", ":", "if", "isinstance", "(", "obj", ",", "BinaryDecisionDiagram", ")", ":", "ftype", "=", "BinaryDecisionDiagram", "elif", "isinstance", "(", "obj", ",", "Expression", ")", ":", "ftype", "=", "Expression", "elif", "isinstance", "(", "obj", ",", "TruthTable", ")", ":", "ftype", "=", "TruthTable", "else", ":", "raise", "TypeError", "(", "\"expected valid Function inputs\"", ")", "elif", "not", "isinstance", "(", "obj", ",", "ftype", ")", ":", "raise", "ValueError", "(", "\"expected uniform Function types\"", ")", "return", "list", "(", "objs", ")", ",", "(", "(", "0", ",", "len", "(", "objs", ")", ")", ",", ")", ",", "ftype", "elif", "all", "(", "isseq", ")", ":", "items", "=", "list", "(", ")", "shape", "=", "None", "ftype", "=", "None", "for", "obj", "in", "objs", ":", "_items", ",", "_shape", ",", "_ftype", "=", "_itemize", "(", "obj", ")", "if", "shape", "is", "None", ":", "shape", "=", "_shape", "elif", "shape", "!=", "_shape", ":", "raise", "ValueError", "(", "\"expected uniform farray dimensions\"", ")", "if", "ftype", "is", "None", ":", "ftype", "=", "_ftype", "elif", "ftype", "!=", "_ftype", ":", "raise", "ValueError", "(", "\"expected uniform Function types\"", ")", "items", "+=", "_items", "shape", "=", "(", "(", "0", ",", "len", "(", "objs", ")", ")", ",", ")", "+", "shape", "return", "items", ",", "shape", ",", "ftype", "else", ":", "raise", "ValueError", "(", "\"expected uniform farray dimensions\"", ")" ]
Return args that can be used given the current cmd line
def available_args ( self ) : used = list ( self . used_args ) logger . debug ( 'Found used args: %s' % used ) for arg in list ( self . cmd . args . values ( ) ) : if ( arg . is_multiple or arg not in used ) : yield arg elif ( type ( arg . nargs ) is int and arg . nargs > 1 and not arg . nargs == used . count ( arg ) ) : yield arg
1,930
https://github.com/eonpatapon/contrail-api-cli/blob/1571bf523fa054f3d6bf83dba43a224fea173a73/contrail_api_cli/parser.py#L97-L112
[ "def", "reassign_comment_to_book", "(", "self", ",", "comment_id", ",", "from_book_id", ",", "to_book_id", ")", ":", "# Implemented from template for", "# osid.resource.ResourceBinAssignmentSession.reassign_resource_to_bin", "self", ".", "assign_comment_to_book", "(", "comment_id", ",", "to_book_id", ")", "try", ":", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "from_book_id", ")", "except", ":", "# something went wrong, roll back assignment to to_book_id", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "to_book_id", ")", "raise" ]
Returns true if the elem_ref is an element reference
def is_elem_ref ( elem_ref ) : return ( elem_ref and isinstance ( elem_ref , tuple ) and len ( elem_ref ) == 3 and ( elem_ref [ 0 ] == ElemRefObj or elem_ref [ 0 ] == ElemRefArr ) )
1,931
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/core/erefs.py#L11-L23
[ "def", "complete_experiment", "(", "self", ",", "status", ")", ":", "self", ".", "log", "(", "\"Bot player completing experiment. Status: {}\"", ".", "format", "(", "status", ")", ")", "while", "True", ":", "url", "=", "\"{host}/{status}?participant_id={participant_id}\"", ".", "format", "(", "host", "=", "self", ".", "host", ",", "participant_id", "=", "self", ".", "participant_id", ",", "status", "=", "status", ")", "try", ":", "result", "=", "requests", ".", "get", "(", "url", ")", "result", ".", "raise_for_status", "(", ")", "except", "RequestException", ":", "self", ".", "stochastic_sleep", "(", ")", "continue", "return", "result" ]
Gets the element referenced by elem_ref or returns the elem_ref directly if its not a reference .
def get_elem ( elem_ref , default = None ) : if not is_elem_ref ( elem_ref ) : return elem_ref elif elem_ref [ 0 ] == ElemRefObj : return getattr ( elem_ref [ 1 ] , elem_ref [ 2 ] , default ) elif elem_ref [ 0 ] == ElemRefArr : return elem_ref [ 1 ] [ elem_ref [ 2 ] ]
1,932
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/core/erefs.py#L40-L53
[ "def", "set_status", "(", "self", ",", "name", ":", "str", "=", "None", ")", ":", "game", "=", "None", "if", "name", ":", "game", "=", "{", "'name'", ":", "name", "}", "payload", "=", "{", "'op'", ":", "WebSocketEvent", ".", "STATUS_UPDATE", ".", "value", ",", "'d'", ":", "{", "'game'", ":", "game", ",", "'status'", ":", "'online'", ",", "'afk'", ":", "False", ",", "'since'", ":", "0.0", "}", "}", "data", "=", "json", ".", "dumps", "(", "payload", ",", "indent", "=", "2", ")", "self", ".", "logger", ".", "debug", "(", "f'Sending status update payload: {data}'", ")", "self", ".", "_ws", ".", "send", "(", "data", ")" ]
Sets element referenced by the elem_ref . Returns the elem .
def set_elem ( elem_ref , elem ) : if elem_ref is None or elem_ref == elem or not is_elem_ref ( elem_ref ) : return elem elif elem_ref [ 0 ] == ElemRefObj : setattr ( elem_ref [ 1 ] , elem_ref [ 2 ] , elem ) return elem elif elem_ref [ 0 ] == ElemRefArr : elem_ref [ 1 ] [ elem_ref [ 2 ] ] = elem return elem
1,933
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/core/erefs.py#L56-L73
[ "def", "set_status", "(", "self", ",", "name", ":", "str", "=", "None", ")", ":", "game", "=", "None", "if", "name", ":", "game", "=", "{", "'name'", ":", "name", "}", "payload", "=", "{", "'op'", ":", "WebSocketEvent", ".", "STATUS_UPDATE", ".", "value", ",", "'d'", ":", "{", "'game'", ":", "game", ",", "'status'", ":", "'online'", ",", "'afk'", ":", "False", ",", "'since'", ":", "0.0", "}", "}", "data", "=", "json", ".", "dumps", "(", "payload", ",", "indent", "=", "2", ")", "self", ".", "logger", ".", "debug", "(", "f'Sending status update payload: {data}'", ")", "self", ".", "_ws", ".", "send", "(", "data", ")" ]
Revise wording to match canonical and expected forms .
def _preprocess ( inp ) : inp = re . sub ( r'(\b)a(\b)' , r'\g<1>one\g<2>' , inp ) inp = re . sub ( r'to the (.*) power' , r'to \g<1>' , inp ) inp = re . sub ( r'to the (.*?)(\b)' , r'to \g<1>\g<2>' , inp ) inp = re . sub ( r'log of' , r'log' , inp ) inp = re . sub ( r'(square )?root( of)?' , r'sqrt' , inp ) inp = re . sub ( r'squared' , r'to two' , inp ) inp = re . sub ( r'cubed' , r'to three' , inp ) inp = re . sub ( r'divided?( by)?' , r'divide' , inp ) inp = re . sub ( r'(\b)over(\b)' , r'\g<1>divide\g<2>' , inp ) inp = re . sub ( r'(\b)EE(\b)' , r'\g<1>e\g<2>' , inp ) inp = re . sub ( r'(\b)E(\b)' , r'\g<1>e\g<2>' , inp ) inp = re . sub ( r'(\b)pie(\b)' , r'\g<1>pi\g<2>' , inp ) inp = re . sub ( r'(\b)PI(\b)' , r'\g<1>pi\g<2>' , inp ) def findImplicitMultiplications ( inp ) : """Replace omitted 'times' references.""" def findConstantMultiplications ( inp ) : split = inp . split ( ' ' ) revision = "" converter = NumberService ( ) for i , w in enumerate ( split ) : if i > 0 and w in MathService . __constants__ : if converter . isValid ( split [ i - 1 ] ) : revision += " times" if not revision : revision = w else : revision += " " + w return revision def findUnaryMultiplications ( inp ) : split = inp . split ( ' ' ) revision = "" for i , w in enumerate ( split ) : if i > 0 and w in MathService . __unaryOperators__ : last_op = split [ i - 1 ] binary = last_op in MathService . __binaryOperators__ unary = last_op in MathService . __unaryOperators__ if last_op and not ( binary or unary ) : revision += " times" if not revision : revision = w else : revision += " " + w return revision return findUnaryMultiplications ( findConstantMultiplications ( inp ) ) return findImplicitMultiplications ( inp )
1,934
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/solver.py#L66-L123
[ "def", "run", "(", "self", ")", ":", "if", "self", ".", "debug", ":", "print", "(", "\"Starting \"", "+", "self", ".", "name", ")", "# Lancement du programme du thread", "if", "isinstance", "(", "self", ".", "function", ",", "str", ")", ":", "globals", "(", ")", "[", "self", ".", "function", "]", "(", "*", "self", ".", "args", ",", "*", "*", "self", ".", "kwargs", ")", "else", ":", "self", ".", "function", "(", "*", "self", ".", "args", ",", "*", "*", "self", ".", "kwargs", ")", "if", "self", ".", "debug", ":", "print", "(", "\"Exiting \"", "+", "self", ".", "name", ")" ]
Calculates a final value given a set of numbers and symbols .
def _calculate ( numbers , symbols ) : if len ( numbers ) is 1 : return numbers [ 0 ] precedence = [ [ pow ] , [ mul , div ] , [ add , sub ] ] # Find most important operation for op_group in precedence : for i , op in enumerate ( symbols ) : if op in op_group : # Apply operation a = numbers [ i ] b = numbers [ i + 1 ] result = MathService . _applyBinary ( a , b , op ) new_numbers = numbers [ : i ] + [ result ] + numbers [ i + 2 : ] new_symbols = symbols [ : i ] + symbols [ i + 1 : ] return MathService . _calculate ( new_numbers , new_symbols )
1,935
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/solver.py#L126-L144
[ "def", "libvlc_video_set_crop_geometry", "(", "p_mi", ",", "psz_geometry", ")", ":", "f", "=", "_Cfunctions", ".", "get", "(", "'libvlc_video_set_crop_geometry'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_video_set_crop_geometry'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaPlayer", ",", "ctypes", ".", "c_char_p", ")", "return", "f", "(", "p_mi", ",", "psz_geometry", ")" ]
Solves the equation specified by the input string .
def parseEquation ( self , inp ) : inp = MathService . _preprocess ( inp ) split = inp . split ( ' ' ) # Recursive call on unary operators for i , w in enumerate ( split ) : if w in self . __unaryOperators__ : op = self . __unaryOperators__ [ w ] # Split equation into halves eq1 = ' ' . join ( split [ : i ] ) eq2 = ' ' . join ( split [ i + 1 : ] ) # Calculate second half result = MathService . _applyUnary ( self . parseEquation ( eq2 ) , op ) return self . parseEquation ( eq1 + " " + str ( result ) ) def extractNumbersAndSymbols ( inp ) : numbers = [ ] symbols = [ ] # Divide into values (numbers), operators (symbols) next_number = "" for w in inp . split ( ' ' ) : if w in self . __binaryOperators__ : symbols . append ( self . __binaryOperators__ [ w ] ) if next_number : numbers . append ( next_number ) next_number = "" else : if next_number : next_number += " " next_number += w if next_number : numbers . append ( next_number ) # Cast numbers from words to integers def convert ( n ) : if n in self . __constants__ : return self . __constants__ [ n ] converter = NumberService ( ) return converter . parse ( n ) numbers = [ convert ( n ) for n in numbers ] return numbers , symbols numbers , symbols = extractNumbersAndSymbols ( inp ) return MathService . _calculate ( numbers , symbols )
1,936
https://github.com/crm416/semantic/blob/46deb8fefb3ea58aad2fedc8d0d62f3ee254b8fe/semantic/solver.py#L146-L209
[ "def", "convertTimestamps", "(", "column", ")", ":", "tempColumn", "=", "column", "try", ":", "# Try to convert the first row and a random row instead of the complete", "# column, might be faster", "# tempValue = np.datetime64(column[0])", "tempValue", "=", "np", ".", "datetime64", "(", "column", "[", "randint", "(", "0", ",", "len", "(", "column", ".", "index", ")", "-", "1", ")", "]", ")", "tempColumn", "=", "column", ".", "apply", "(", "to_datetime", ")", "except", "Exception", ":", "pass", "return", "tempColumn" ]
Registers a new command for a plugin .
def register ( self , command , description , function , params = [ ] ) : return self . app . commands . register ( command , description , function , params , self . plugin )
1,937
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_commands_pattern.py#L80-L90
[ "def", "_bsecurate_cli_compare_basis_files", "(", "args", ")", ":", "ret", "=", "curate", ".", "compare_basis_files", "(", "args", ".", "file1", ",", "args", ".", "file2", ",", "args", ".", "readfmt1", ",", "args", ".", "readfmt2", ",", "args", ".", "uncontract_general", ")", "if", "ret", ":", "return", "\"No difference found\"", "else", ":", "return", "\"DIFFERENCES FOUND. SEE ABOVE\"" ]
Returns commands which can be filtered by name .
def get ( self , name = None ) : return self . app . commands . get ( name , self . plugin )
1,938
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_commands_pattern.py#L101-L109
[ "def", "run", "(", "self", ")", ":", "port", ",", "tensorboard_process", "=", "self", ".", "create_tensorboard_process", "(", ")", "LOGGER", ".", "info", "(", "'TensorBoard 0.1.7 at http://localhost:{}'", ".", "format", "(", "port", ")", ")", "while", "not", "self", ".", "estimator", ".", "checkpoint_path", ":", "self", ".", "event", ".", "wait", "(", "1", ")", "with", "self", ".", "_temporary_directory", "(", ")", "as", "aws_sync_dir", ":", "while", "not", "self", ".", "event", ".", "is_set", "(", ")", ":", "args", "=", "[", "'aws'", ",", "'s3'", ",", "'sync'", ",", "self", ".", "estimator", ".", "checkpoint_path", ",", "aws_sync_dir", "]", "subprocess", ".", "call", "(", "args", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "self", ".", "_sync_directories", "(", "aws_sync_dir", ",", "self", ".", "logdir", ")", "self", ".", "event", ".", "wait", "(", "10", ")", "tensorboard_process", ".", "terminate", "(", ")" ]
Returns commands which can be filtered by name or plugin .
def get ( self , name = None , plugin = None ) : if plugin is not None : if name is None : command_list = { } for key in self . _commands . keys ( ) : if self . _commands [ key ] . plugin == plugin : command_list [ key ] = self . _commands [ key ] return command_list else : if name in self . _commands . keys ( ) : if self . _commands [ name ] . plugin == plugin : return self . _commands [ name ] else : return None else : return None else : if name is None : return self . _commands else : if name in self . _commands . keys ( ) : return self . _commands [ name ] else : return None
1,939
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_commands_pattern.py#L130-L162
[ "def", "post", "(", "self", ",", "uri", ",", "body", "=", "None", ",", "logon_required", "=", "True", ",", "wait_for_completion", "=", "True", ",", "operation_timeout", "=", "None", ")", ":", "try", ":", "return", "self", ".", "_urihandler", ".", "post", "(", "self", ".", "_hmc", ",", "uri", ",", "body", ",", "logon_required", ",", "wait_for_completion", ")", "except", "HTTPError", "as", "exc", ":", "raise", "zhmcclient", ".", "HTTPError", "(", "exc", ".", "response", "(", ")", ")", "except", "ConnectionError", "as", "exc", ":", "raise", "zhmcclient", ".", "ConnectionError", "(", "exc", ".", "message", ",", "None", ")" ]
Unregisters an existing command so that this command is no longer available on the command line interface .
def unregister ( self , command ) : if command not in self . _commands . keys ( ) : self . log . warning ( "Can not unregister command %s" % command ) else : # Click does not have any kind of a function to unregister/remove/deactivate already added commands. # So we need to delete the related objects manually from the click internal commands dictionary for # our root command. del ( self . _click_root_command . commands [ command ] ) # Finally lets delete the command from our internal dictionary too. del ( self . _commands [ command ] ) self . log . debug ( "Command %s got unregistered" % command )
1,940
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_commands_pattern.py#L185-L202
[ "def", "adapt_single_html", "(", "html", ")", ":", "html_root", "=", "etree", ".", "fromstring", "(", "html", ")", "metadata", "=", "parse_metadata", "(", "html_root", ".", "xpath", "(", "'//*[@data-type=\"metadata\"]'", ")", "[", "0", "]", ")", "id_", "=", "metadata", "[", "'cnx-archive-uri'", "]", "or", "'book'", "binder", "=", "Binder", "(", "id_", ",", "metadata", "=", "metadata", ")", "nav_tree", "=", "parse_navigation_html_to_tree", "(", "html_root", ",", "id_", ")", "body", "=", "html_root", ".", "xpath", "(", "'//xhtml:body'", ",", "namespaces", "=", "HTML_DOCUMENT_NAMESPACES", ")", "_adapt_single_html_tree", "(", "binder", ",", "body", "[", "0", "]", ",", "nav_tree", ",", "top_metadata", "=", "metadata", ")", "return", "binder" ]
Returns a declared caveat asserting that the given key is set to the given value .
def declared_caveat ( key , value ) : if key . find ( ' ' ) >= 0 or key == '' : return error_caveat ( 'invalid caveat \'declared\' key "{}"' . format ( key ) ) return _first_party ( COND_DECLARED , key + ' ' + value )
1,941
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/checkers/_caveat.py#L33-L46
[ "def", "exec_start", "(", "self", ",", "exec_id", ",", "detach", "=", "False", ",", "tty", "=", "False", ",", "stream", "=", "False", ",", "socket", "=", "False", ",", "demux", "=", "False", ")", ":", "# we want opened socket if socket == True", "data", "=", "{", "'Tty'", ":", "tty", ",", "'Detach'", ":", "detach", "}", "headers", "=", "{", "}", "if", "detach", "else", "{", "'Connection'", ":", "'Upgrade'", ",", "'Upgrade'", ":", "'tcp'", "}", "res", "=", "self", ".", "_post_json", "(", "self", ".", "_url", "(", "'/exec/{0}/start'", ",", "exec_id", ")", ",", "headers", "=", "headers", ",", "data", "=", "data", ",", "stream", "=", "True", ")", "if", "detach", ":", "return", "self", ".", "_result", "(", "res", ")", "if", "socket", ":", "return", "self", ".", "_get_raw_response_socket", "(", "res", ")", "return", "self", ".", "_read_from_socket", "(", "res", ",", "stream", ",", "tty", "=", "tty", ",", "demux", "=", "demux", ")" ]
Helper for allow_caveat and deny_caveat .
def _operation_caveat ( cond , ops ) : for op in ops : if op . find ( ' ' ) != - 1 : return error_caveat ( 'invalid operation name "{}"' . format ( op ) ) return _first_party ( cond , ' ' . join ( ops ) )
1,942
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/checkers/_caveat.py#L81-L89
[ "def", "__write_to_character_device", "(", "self", ",", "event_list", ",", "timeval", "=", "None", ")", ":", "# Remember the position of the stream", "pos", "=", "self", ".", "_character_device", ".", "tell", "(", ")", "# Go to the end of the stream", "self", ".", "_character_device", ".", "seek", "(", "0", ",", "2", ")", "# Write the new data to the end", "for", "event", "in", "event_list", ":", "self", ".", "_character_device", ".", "write", "(", "event", ")", "# Add a sync marker", "sync", "=", "self", ".", "create_event_object", "(", "\"Sync\"", ",", "0", ",", "0", ",", "timeval", ")", "self", ".", "_character_device", ".", "write", "(", "sync", ")", "# Put the stream back to its original position", "self", ".", "_character_device", ".", "seek", "(", "pos", ")" ]
Return s as a bytes type using utf - 8 encoding if necessary .
def to_bytes ( s ) : if isinstance ( s , six . binary_type ) : return s if isinstance ( s , six . string_types ) : return s . encode ( 'utf-8' ) raise TypeError ( 'want string or bytes, got {}' , type ( s ) )
1,943
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/_utils/__init__.py#L18-L27
[ "def", "detach_session", "(", "self", ")", ":", "if", "self", ".", "_session", "is", "not", "None", ":", "self", ".", "_session", ".", "unsubscribe", "(", "self", ")", "self", ".", "_session", "=", "None" ]
Base64 decodes a base64 - encoded string in URL - safe or normal format with or without padding . The argument may be string or bytes .
def b64decode ( s ) : # add padding if necessary. s = to_bytes ( s ) if not s . endswith ( b'=' ) : s = s + b'=' * ( - len ( s ) % 4 ) try : if '_' or '-' in s : return base64 . urlsafe_b64decode ( s ) else : return base64 . b64decode ( s ) except ( TypeError , binascii . Error ) as e : raise ValueError ( str ( e ) )
1,944
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/_utils/__init__.py#L79-L98
[ "def", "remove_experiment", "(", "self", ",", "id", ")", ":", "if", "id", "in", "self", ".", "experiments", ":", "self", ".", "experiments", ".", "pop", "(", "id", ")", "self", ".", "write_file", "(", ")" ]
Base64 encode using URL - safe encoding with padding removed .
def raw_urlsafe_b64encode ( b ) : b = to_bytes ( b ) b = base64 . urlsafe_b64encode ( b ) b = b . rstrip ( b'=' ) # strip padding return b
1,945
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/_utils/__init__.py#L101-L110
[ "def", "_on_group_stream_changed", "(", "self", ",", "data", ")", ":", "self", ".", "_groups", ".", "get", "(", "data", ".", "get", "(", "'id'", ")", ")", ".", "update_stream", "(", "data", ")" ]
Return a new Cookie using a slightly more friendly API than that provided by six . moves . http_cookiejar
def cookie ( url , name , value , expires = None ) : u = urlparse ( url ) domain = u . hostname if '.' not in domain and not _is_ip_addr ( domain ) : domain += ".local" port = str ( u . port ) if u . port is not None else None secure = u . scheme == 'https' if expires is not None : if expires . tzinfo is not None : raise ValueError ( 'Cookie expiration must be a naive datetime' ) expires = ( expires - datetime ( 1970 , 1 , 1 ) ) . total_seconds ( ) return http_cookiejar . Cookie ( version = 0 , name = name , value = value , port = port , port_specified = port is not None , domain = domain , domain_specified = True , domain_initial_dot = False , path = u . path , path_specified = True , secure = secure , expires = expires , discard = False , comment = None , comment_url = None , rest = None , rfc2109 = False , )
1,946
https://github.com/go-macaroon-bakery/py-macaroon-bakery/blob/63ce1ef1dabe816eb8aaec48fbb46761c34ddf77/macaroonbakery/_utils/__init__.py#L123-L165
[ "async", "def", "with_exception", "(", "self", ",", "subprocess", ",", "*", "matchers", ")", ":", "def", "_callback", "(", "event", ",", "matcher", ")", ":", "raise", "RoutineException", "(", "matcher", ",", "event", ")", "return", "await", "self", ".", "with_callback", "(", "subprocess", ",", "_callback", ",", "*", "matchers", ")" ]
LOGIN CAN ONLY BE DONE BY POSTING TO A HTTP FORM . A COOKIE IS THEN USED FOR INTERACTING WITH THE API
def _login ( self ) : self . logger . debug ( "Logging into " + "{}/{}" . format ( self . _im_api_url , "j_spring_security_check" ) ) self . _im_session . headers . update ( { 'Content-Type' : 'application/x-www-form-urlencoded' , 'User-Agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36' } ) #self._im_session.mount('https://', TLS1Adapter()) #self._im_verify_ssl = False self . j_username = self . _username self . j_password = self . _password requests . packages . urllib3 . disable_warnings ( ) # Disable unverified connection warning. payload = { 'j_username' : self . j_username , 'j_password' : self . j_password , 'submit' : 'Login' } # login to ScaleIO IM r = self . _im_session . post ( "{}/{}" . format ( self . _im_api_url , "j_spring_security_check" ) , verify = self . _im_verify_ssl , #headers = {'Content-Type':'application/x-www-form-urlencoded', 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36'}, data = payload ) self . logger . debug ( "Login POST response: " + "{}" . format ( r . text ) ) self . _im_logged_in = True """ ADD CODE: Check if this is IM have existing configuration. If so populate ScaleIO_configurtion_object """
1,947
https://github.com/swevm/scaleio-py/blob/d043a0137cb925987fd5c895a3210968ce1d9028/scaleiopy/im.py#L107-L134
[ "def", "moment_inertia", "(", "self", ")", ":", "tensor", "=", "inertia", ".", "sphere_inertia", "(", "mass", "=", "self", ".", "volume", ",", "radius", "=", "self", ".", "primitive", ".", "radius", ")", "return", "tensor" ]
Convinient method for GET requests Returns http request status value from a POST request
def _do_get ( self , uri , * * kwargs ) : #TODO: # Add error handling. Check for HTTP status here would be much more conveinent than in each calling method scaleioapi_get_headers = { 'Content-type' : 'application/json' , 'Version' : '1.0' } self . logger . debug ( "_do_get() " + "{}/{}" . format ( self . _api_url , uri ) ) if kwargs : for key , value in kwargs . iteritems ( ) : if key == 'headers' : scaleio_get_headersvalue = value try : #response = self._im_session.get("{}/{}".format(self._api_url, uri), headers = scaleioapi_get_headers, payload = scaleio_payload).json() response = self . _im_session . get ( "{}/{}" . format ( self . _api_url , uri ) , * * kwargs ) . json ( ) #response = self._session.get(url, headers=scaleioapi_post_headers, **kwargs) if response . status_code == requests . codes . ok : return response else : raise RuntimeError ( "_do_get() - HTTP response error" + response . status_code ) except : raise RuntimeError ( "_do_get() - Communication error with ScaleIO gateway" ) return response
1,948
https://github.com/swevm/scaleio-py/blob/d043a0137cb925987fd5c895a3210968ce1d9028/scaleiopy/im.py#L144-L169
[ "def", "crop_to_extents", "(", "img1", ",", "img2", ",", "padding", ")", ":", "beg_coords1", ",", "end_coords1", "=", "crop_coords", "(", "img1", ",", "padding", ")", "beg_coords2", ",", "end_coords2", "=", "crop_coords", "(", "img2", ",", "padding", ")", "beg_coords", "=", "np", ".", "fmin", "(", "beg_coords1", ",", "beg_coords2", ")", "end_coords", "=", "np", ".", "fmax", "(", "end_coords1", ",", "end_coords2", ")", "img1", "=", "crop_3dimage", "(", "img1", ",", "beg_coords", ",", "end_coords", ")", "img2", "=", "crop_3dimage", "(", "img2", ",", "beg_coords", ",", "end_coords", ")", "return", "img1", ",", "img2" ]
Parameters as they look in the form for uploading packages to IM
def uploadFileToIM ( self , directory , filename , title ) : self . logger . debug ( "uploadFileToIM(" + "{},{},{})" . format ( directory , filename , title ) ) parameters = { 'data-filename-placement' : 'inside' , 'title' : str ( filename ) , 'filename' : str ( filename ) , 'type' : 'file' , 'name' : 'files' , 'id' : 'fileToUpload' , 'multiple' : '' } file_dict = { 'files' : ( str ( filename ) , open ( directory + filename , 'rb' ) , 'application/x-rpm' ) } m = MultipartEncoder ( fields = file_dict ) temp_username = self . _username temp_password = self . _password temp_im_api_url = self . _im_api_url temp_im_session = requests . Session ( ) temp_im_session . mount ( 'https://' , TLS1Adapter ( ) ) temp_im_verify_ssl = self . _im_verify_ssl resp = temp_im_session . post ( "{}/{}" . format ( temp_im_api_url , "types/InstallationPackage/instances/uploadPackage" ) , auth = HTTPBasicAuth ( temp_username , temp_password ) , #headers = m.content_type, files = file_dict , verify = False , data = parameters ) self . logger . info ( "Uploaded: " + "{}" . format ( filename ) ) self . logger . debug ( "HTTP Response: " + "{}" . format ( resp . status_code ) )
1,949
https://github.com/swevm/scaleio-py/blob/d043a0137cb925987fd5c895a3210968ce1d9028/scaleiopy/im.py#L670-L702
[ "def", "get_covariance_table", "(", "self", ",", "chain", "=", "0", ",", "parameters", "=", "None", ",", "caption", "=", "\"Parameter Covariance\"", ",", "label", "=", "\"tab:parameter_covariance\"", ")", ":", "parameters", ",", "cov", "=", "self", ".", "get_covariance", "(", "chain", "=", "chain", ",", "parameters", "=", "parameters", ")", "return", "self", ".", "_get_2d_latex_table", "(", "parameters", ",", "cov", ",", "caption", ",", "label", ")" ]
Binary dump of the integer of given type
async def dump_varint_t ( writer , type_or , pv ) : width = int_mark_to_size ( type_or ) n = ( pv << 2 ) | type_or buffer = _UINT_BUFFER for _ in range ( width ) : buffer [ 0 ] = n & 0xff await writer . awrite ( buffer ) n >>= 8 return width
1,950
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrrpc.py#L210-L228
[ "def", "_create_projects_file", "(", "project_name", ",", "data_source", ",", "items", ")", ":", "repositories", "=", "[", "]", "for", "item", "in", "items", ":", "if", "item", "[", "'origin'", "]", "not", "in", "repositories", ":", "repositories", ".", "append", "(", "item", "[", "'origin'", "]", ")", "projects", "=", "{", "project_name", ":", "{", "data_source", ":", "repositories", "}", "}", "projects_file", ",", "projects_file_path", "=", "tempfile", ".", "mkstemp", "(", "prefix", "=", "'track_items_'", ")", "with", "open", "(", "projects_file_path", ",", "\"w\"", ")", "as", "pfile", ":", "json", ".", "dump", "(", "projects", ",", "pfile", ",", "indent", "=", "True", ")", "return", "projects_file_path" ]
Binary dump of the variable size integer
async def dump_varint ( writer , val ) : if val <= 63 : return await dump_varint_t ( writer , PortableRawSizeMark . BYTE , val ) elif val <= 16383 : return await dump_varint_t ( writer , PortableRawSizeMark . WORD , val ) elif val <= 1073741823 : return await dump_varint_t ( writer , PortableRawSizeMark . DWORD , val ) else : if val > 4611686018427387903 : raise ValueError ( 'Int too big' ) return await dump_varint_t ( writer , PortableRawSizeMark . INT64 , val )
1,951
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrrpc.py#L231-L248
[ "def", "load_market_data", "(", "trading_day", "=", "None", ",", "trading_days", "=", "None", ",", "bm_symbol", "=", "'SPY'", ",", "environ", "=", "None", ")", ":", "if", "trading_day", "is", "None", ":", "trading_day", "=", "get_calendar", "(", "'XNYS'", ")", ".", "day", "if", "trading_days", "is", "None", ":", "trading_days", "=", "get_calendar", "(", "'XNYS'", ")", ".", "all_sessions", "first_date", "=", "trading_days", "[", "0", "]", "now", "=", "pd", ".", "Timestamp", ".", "utcnow", "(", ")", "# we will fill missing benchmark data through latest trading date", "last_date", "=", "trading_days", "[", "trading_days", ".", "get_loc", "(", "now", ",", "method", "=", "'ffill'", ")", "]", "br", "=", "ensure_benchmark_data", "(", "bm_symbol", ",", "first_date", ",", "last_date", ",", "now", ",", "# We need the trading_day to figure out the close prior to the first", "# date so that we can compute returns for the first date.", "trading_day", ",", "environ", ",", ")", "tc", "=", "ensure_treasury_data", "(", "bm_symbol", ",", "first_date", ",", "last_date", ",", "now", ",", "environ", ",", ")", "# combine dt indices and reindex using ffill then bfill", "all_dt", "=", "br", ".", "index", ".", "union", "(", "tc", ".", "index", ")", "br", "=", "br", ".", "reindex", "(", "all_dt", ",", "method", "=", "'ffill'", ")", ".", "fillna", "(", "method", "=", "'bfill'", ")", "tc", "=", "tc", ".", "reindex", "(", "all_dt", ",", "method", "=", "'ffill'", ")", ".", "fillna", "(", "method", "=", "'bfill'", ")", "benchmark_returns", "=", "br", "[", "br", ".", "index", ".", "slice_indexer", "(", "first_date", ",", "last_date", ")", "]", "treasury_curves", "=", "tc", "[", "tc", ".", "index", ".", "slice_indexer", "(", "first_date", ",", "last_date", ")", "]", "return", "benchmark_returns", ",", "treasury_curves" ]
Binary load of variable size integer serialized by dump_varint
async def load_varint ( reader ) : buffer = _UINT_BUFFER await reader . areadinto ( buffer ) width = int_mark_to_size ( buffer [ 0 ] & PortableRawSizeMark . MASK ) result = buffer [ 0 ] shift = 8 for _ in range ( width - 1 ) : await reader . areadinto ( buffer ) result += buffer [ 0 ] << shift shift += 8 return result >> 2
1,952
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrrpc.py#L251-L269
[ "async", "def", "on_raw_422", "(", "self", ",", "message", ")", ":", "await", "self", ".", "_registration_completed", "(", "message", ")", "self", ".", "motd", "=", "None", "await", "self", ".", "on_connect", "(", ")" ]
Binary string dump
async def dump_string ( writer , val ) : await dump_varint ( writer , len ( val ) ) await writer . awrite ( val )
1,953
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrrpc.py#L272-L281
[ "def", "__numero_tres_cifras", "(", "self", ",", "number", ",", "indice", "=", "None", ",", "sing", "=", "False", ")", ":", "number", "=", "int", "(", "number", ")", "if", "number", "<", "30", ":", "if", "sing", ":", "return", "especiales_apocopado", "[", "number", "]", "else", ":", "return", "especiales_masculino", "[", "number", "]", "elif", "number", "<", "100", ":", "texto", "=", "decenas", "[", "number", "//", "10", "]", "resto", "=", "number", "%", "10", "if", "resto", ":", "texto", "+=", "' y %s'", "%", "self", ".", "__numero_tres_cifras", "(", "resto", ",", "None", ",", "sing", ")", "return", "texto", "if", "number", "==", "100", ":", "return", "'cien'", "if", "number", "<", "1000", ":", "texto", "=", "centena_masculino", "[", "number", "//", "100", "]", "resto", "=", "number", "%", "100", "if", "resto", ":", "texto", "+=", "' %s'", "%", "self", ".", "__numero_tres_cifras", "(", "resto", ",", "None", ",", "sing", ")", "return", "texto" ]
Loads string from binary stream
async def load_string ( reader ) : ivalue = await load_varint ( reader ) fvalue = bytearray ( ivalue ) await reader . areadinto ( fvalue ) return bytes ( fvalue )
1,954
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrrpc.py#L284-L294
[ "def", "custom_server_error", "(", "request", ",", "template_name", "=", "'500.html'", ",", "admin_template_name", "=", "'500A.html'", ")", ":", "trace", "=", "None", "if", "request", ".", "user", ".", "is_authenticated", "(", ")", "and", "(", "request", ".", "user", ".", "is_staff", "or", "request", ".", "user", ".", "is_superuser", ")", ":", "try", ":", "import", "traceback", ",", "sys", "trace", "=", "traceback", ".", "format_exception", "(", "*", "(", "sys", ".", "exc_info", "(", ")", ")", ")", "if", "not", "request", ".", "user", ".", "is_superuser", "and", "trace", ":", "trace", "=", "trace", "[", "-", "1", ":", "]", "trace", "=", "'\\n'", ".", "join", "(", "trace", ")", "except", ":", "pass", "# if url is part of the admin site, use the 500A.html template", "if", "request", ".", "path", ".", "startswith", "(", "'/%s'", "%", "admin", ".", "site", ".", "name", ")", ":", "template_name", "=", "admin_template_name", "t", "=", "loader", ".", "get_template", "(", "template_name", ")", "# You need to create a 500.html and 500A.html template.", "return", "http", ".", "HttpResponseServerError", "(", "t", ".", "render", "(", "Context", "(", "{", "'trace'", ":", "trace", "}", ")", ")", ")" ]
Dumps blob to a binary stream
async def dump_blob ( writer , elem , elem_type , params = None ) : elem_is_blob = isinstance ( elem , x . BlobType ) data = bytes ( getattr ( elem , x . BlobType . DATA_ATTR ) if elem_is_blob else elem ) await dump_varint ( writer , len ( elem ) ) await writer . awrite ( data )
1,955
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrrpc.py#L297-L310
[ "def", "setup_privnet", "(", "self", ",", "host", "=", "None", ")", ":", "self", ".", "setup", "(", "FILENAME_SETTINGS_PRIVNET", ")", "if", "isinstance", "(", "host", ",", "str", ")", ":", "if", "\":\"", "in", "host", ":", "raise", "Exception", "(", "\"No protocol prefix or port allowed in host, use just the IP or domain.\"", ")", "print", "(", "\"Using custom privatenet host:\"", ",", "host", ")", "self", ".", "SEED_LIST", "=", "[", "\"%s:20333\"", "%", "host", "]", "self", ".", "RPC_LIST", "=", "[", "\"http://%s:30333\"", "%", "host", "]", "print", "(", "\"- P2P:\"", ",", "\", \"", ".", "join", "(", "self", ".", "SEED_LIST", ")", ")", "print", "(", "\"- RPC:\"", ",", "\", \"", ".", "join", "(", "self", ".", "RPC_LIST", ")", ")", "self", ".", "check_privatenet", "(", ")" ]
Loads container of elements from the reader . Supports the container ref . Returns loaded container . Blob array writer as in XMRRPC is serialized without size serialization .
async def container_load ( self , container_type , params = None , container = None , obj = None ) : elem_type = x . container_elem_type ( container_type , params ) elem_size = await self . get_element_size ( elem_type = elem_type , params = params ) # If container is of fixed size we know the size to load from the input. # Otherwise we have to read to the end data_left = len ( self . iobj . buffer ) c_len = container_type . SIZE if not container_type . FIX_SIZE : if data_left == 0 : return None if data_left % elem_size != 0 : raise helpers . ArchiveException ( 'Container size mod elem size not 0' ) c_len = data_left // elem_size res = container if container else [ ] for i in range ( c_len ) : try : self . tracker . push_index ( i ) fvalue = await self . _load_field ( elem_type , params [ 1 : ] if params else None , x . eref ( res , i ) if container else None ) self . tracker . pop ( ) except Exception as e : raise helpers . ArchiveException ( e , tracker = self . tracker ) from e if not container : res . append ( fvalue ) return res
1,956
https://github.com/ph4r05/monero-serialize/blob/cebb3ba2aaf2e9211b1dcc6db2bab02946d06e42/monero_serialize/xmrrpc.py#L682-L721
[ "def", "with_category", "(", "category", ":", "str", ")", "->", "Callable", ":", "def", "cat_decorator", "(", "func", ")", ":", "categorize", "(", "func", ",", "category", ")", "return", "func", "return", "cat_decorator" ]
Create an index . html for one package .
def make_index_for ( package , index_dir , verbose = True ) : index_template = """\ <html> <head><title>{title}</title></head> <body> <h1>{title}</h1> <ul> {packages} </ul> </body> </html> """ item_template = '<li><a href="{1}">{0}</a></li>' index_filename = os . path . join ( index_dir , "index.html" ) if not os . path . isdir ( index_dir ) : os . makedirs ( index_dir ) parts = [ ] for pkg_filename in package . files : pkg_name = os . path . basename ( pkg_filename ) if pkg_name == "index.html" : # -- ROOT-INDEX: pkg_name = os . path . basename ( os . path . dirname ( pkg_filename ) ) else : pkg_name = package . splitext ( pkg_name ) pkg_relpath_to = os . path . relpath ( pkg_filename , index_dir ) parts . append ( item_template . format ( pkg_name , pkg_relpath_to ) ) if not parts : print ( "OOPS: Package %s has no files" % package . name ) return if verbose : root_index = not Package . isa ( package . files [ 0 ] ) if root_index : info = "with %d package(s)" % len ( package . files ) else : package_versions = sorted ( set ( package . versions ) ) info = ", " . join ( reversed ( package_versions ) ) message = "%-30s %s" % ( package . name , info ) print ( message ) with open ( index_filename , "w" ) as f : packages = "\n" . join ( parts ) text = index_template . format ( title = package . name , packages = packages ) f . write ( text . strip ( ) ) f . close ( )
1,957
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/bin/make_localpi.py#L118-L170
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Create a pypi server like file structure below download directory .
def make_package_index ( download_dir ) : if not os . path . isdir ( download_dir ) : raise ValueError ( "No such directory: %r" % download_dir ) pkg_rootdir = os . path . join ( download_dir , "simple" ) if os . path . isdir ( pkg_rootdir ) : shutil . rmtree ( pkg_rootdir , ignore_errors = True ) os . mkdir ( pkg_rootdir ) # -- STEP: Collect all packages. package_map = { } packages = [ ] for filename in sorted ( os . listdir ( download_dir ) ) : if not Package . isa ( filename ) : continue pkg_filepath = os . path . join ( download_dir , filename ) package_name = Package . get_pkgname ( pkg_filepath ) package = package_map . get ( package_name , None ) if not package : # -- NEW PACKAGE DETECTED: Store/register package. package = Package ( pkg_filepath ) package_map [ package . name ] = package packages . append ( package ) else : # -- SAME PACKAGE: Collect other variant/version. package . files . append ( pkg_filepath ) # -- STEP: Make local PYTHON PACKAGE INDEX. root_package = Package ( None , "Python Package Index" ) root_package . files = [ os . path . join ( pkg_rootdir , pkg . name , "index.html" ) for pkg in packages ] make_index_for ( root_package , pkg_rootdir ) for package in packages : index_dir = os . path . join ( pkg_rootdir , package . name ) make_index_for ( package , index_dir )
1,958
https://github.com/jenisys/parse_type/blob/7cad3a67a5ca725cb786da31f656fd473084289f/bin/make_localpi.py#L173-L233
[ "def", "color_group", "(", "self", ",", "checked", "=", "False", ",", "test_color", "=", "None", ")", ":", "group", "=", "self", ".", "tabs", ".", "currentWidget", "(", ")", "if", "test_color", "is", "None", ":", "newcolor", "=", "QColorDialog", ".", "getColor", "(", "group", ".", "idx_color", ")", "else", ":", "newcolor", "=", "test_color", "group", ".", "idx_color", "=", "newcolor", "self", ".", "apply", "(", ")" ]
Return a list value translating from other types if necessary .
def _convert_to_list ( self , value , delimiters ) : if not value : return [ ] if delimiters : return [ l . strip ( ) for l in value . split ( delimiters ) ] return [ l . strip ( ) for l in value . split ( ) ]
1,959
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/config.py#L30-L40
[ "def", "is_zipfile", "(", "filename", ")", ":", "try", ":", "fpin", "=", "open", "(", "filename", ",", "\"rb\"", ")", "endrec", "=", "_EndRecData", "(", "fpin", ")", "fpin", ".", "close", "(", ")", "if", "endrec", ":", "return", "True", "# file has correct magic number", "except", "IOError", ":", "pass", "return", "False" ]
A convenience method which coerces the option in the specified section to a list of strings .
def getlist ( self , section , option , raw = False , vars = None , fallback = [ ] , delimiters = ',' ) : v = self . get ( section , option , raw = raw , vars = vars , fallback = fallback ) return self . _convert_to_list ( v , delimiters = delimiters )
1,960
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/config.py#L50-L55
[ "def", "cudaMemcpy_htod", "(", "dst", ",", "src", ",", "count", ")", ":", "status", "=", "_libcudart", ".", "cudaMemcpy", "(", "dst", ",", "src", ",", "ctypes", ".", "c_size_t", "(", "count", ")", ",", "cudaMemcpyHostToDevice", ")", "cudaCheckStatus", "(", "status", ")" ]
A convenience method which coerces the option in the specified section to a file .
def getfile ( self , section , option , raw = False , vars = None , fallback = "" , validate = False ) : v = self . get ( section , option , raw = raw , vars = vars , fallback = fallback ) v = self . _convert_to_path ( v ) return v if not validate or os . path . isfile ( v ) else fallback
1,961
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/config.py#L57-L63
[ "def", "attention_bias_proximal", "(", "length", ")", ":", "r", "=", "tf", ".", "to_float", "(", "tf", ".", "range", "(", "length", ")", ")", "diff", "=", "tf", ".", "expand_dims", "(", "r", ",", "0", ")", "-", "tf", ".", "expand_dims", "(", "r", ",", "1", ")", "return", "tf", ".", "expand_dims", "(", "tf", ".", "expand_dims", "(", "-", "tf", ".", "log1p", "(", "tf", ".", "abs", "(", "diff", ")", ")", ",", "0", ")", ",", "0", ")" ]
A convenience method which coerces the option in the specified section to a directory .
def getdir ( self , section , option , raw = False , vars = None , fallback = "" , validate = False ) : v = self . get ( section , option , raw = raw , vars = vars , fallback = fallback ) v = self . _convert_to_path ( v ) return v if not validate or os . path . isdir ( v ) else fallback
1,962
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/config.py#L65-L71
[ "def", "print_cols", "(", "words", ",", "print_func", ",", "termwidth", "=", "79", ")", ":", "width", "=", "max", "(", "[", "word_len", "(", "word", ")", "for", "word", "in", "words", "]", ")", "nwords", "=", "len", "(", "words", ")", "ncols", "=", "max", "(", "1", ",", "(", "termwidth", "+", "1", ")", "//", "(", "width", "+", "1", ")", ")", "nrows", "=", "(", "nwords", "+", "ncols", "-", "1", ")", "//", "ncols", "for", "row", "in", "range", "(", "nrows", ")", ":", "for", "i", "in", "range", "(", "row", ",", "nwords", ",", "nrows", ")", ":", "word", "=", "words", "[", "i", "]", "if", "word", "[", "0", "]", "==", "'\\x1b'", ":", "print_func", "(", "'%-*s'", "%", "(", "width", "+", "11", ",", "words", "[", "i", "]", ")", ",", "end", "=", "'\\n'", "if", "i", "+", "nrows", ">=", "nwords", "else", "' '", ")", "else", ":", "print_func", "(", "'%-*s'", "%", "(", "width", ",", "words", "[", "i", "]", ")", ",", "end", "=", "'\\n'", "if", "i", "+", "nrows", ">=", "nwords", "else", "' '", ")" ]
A convenience method which coerces the option in the specified section to a list of directories .
def getdirs ( self , section , option , raw = False , vars = None , fallback = [ ] ) : globs = self . getlist ( section , option , fallback = [ ] ) return [ f for g in globs for f in glob . glob ( g ) if os . path . isdir ( f ) ]
1,963
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/util/config.py#L73-L78
[ "def", "print_cols", "(", "words", ",", "print_func", ",", "termwidth", "=", "79", ")", ":", "width", "=", "max", "(", "[", "word_len", "(", "word", ")", "for", "word", "in", "words", "]", ")", "nwords", "=", "len", "(", "words", ")", "ncols", "=", "max", "(", "1", ",", "(", "termwidth", "+", "1", ")", "//", "(", "width", "+", "1", ")", ")", "nrows", "=", "(", "nwords", "+", "ncols", "-", "1", ")", "//", "ncols", "for", "row", "in", "range", "(", "nrows", ")", ":", "for", "i", "in", "range", "(", "row", ",", "nwords", ",", "nrows", ")", ":", "word", "=", "words", "[", "i", "]", "if", "word", "[", "0", "]", "==", "'\\x1b'", ":", "print_func", "(", "'%-*s'", "%", "(", "width", "+", "11", ",", "words", "[", "i", "]", ")", ",", "end", "=", "'\\n'", "if", "i", "+", "nrows", ">=", "nwords", "else", "' '", ")", "else", ":", "print_func", "(", "'%-*s'", "%", "(", "width", ",", "words", "[", "i", "]", ")", ",", "end", "=", "'\\n'", "if", "i", "+", "nrows", ">=", "nwords", "else", "' '", ")" ]
Register a new document .
def register ( self , name , content , description = None ) : return self . __app . documents . register ( name , content , self . _plugin , description )
1,964
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_documents_pattern.py#L66-L75
[ "def", "describe_topic", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "topics", "=", "list_topics", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "ret", "=", "{", "}", "for", "topic", ",", "arn", "in", "topics", ".", "items", "(", ")", ":", "if", "name", "in", "(", "topic", ",", "arn", ")", ":", "ret", "=", "{", "'TopicArn'", ":", "arn", "}", "ret", "[", "'Attributes'", "]", "=", "get_topic_attributes", "(", "arn", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "ret", "[", "'Subscriptions'", "]", "=", "list_subscriptions_by_topic", "(", "arn", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "# Grab extended attributes for the above subscriptions", "for", "sub", "in", "range", "(", "len", "(", "ret", "[", "'Subscriptions'", "]", ")", ")", ":", "sub_arn", "=", "ret", "[", "'Subscriptions'", "]", "[", "sub", "]", "[", "'SubscriptionArn'", "]", "if", "not", "sub_arn", ".", "startswith", "(", "'arn:aws:sns:'", ")", ":", "# Sometimes a sub is in e.g. PendingAccept or other", "# wierd states and doesn't have an ARN yet", "log", ".", "debug", "(", "'Subscription with invalid ARN %s skipped...'", ",", "sub_arn", ")", "continue", "deets", "=", "get_subscription_attributes", "(", "SubscriptionArn", "=", "sub_arn", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "ret", "[", "'Subscriptions'", "]", "[", "sub", "]", ".", "update", "(", "deets", ")", "return", "ret" ]
Unregisters an existing document so that this document is no longer available .
def unregister ( self , document ) : if document not in self . documents . keys ( ) : self . log . warning ( "Can not unregister document %s" % document ) else : del ( self . documents [ document ] ) self . __log . debug ( "Document %s got unregistered" % document )
1,965
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_documents_pattern.py#L116-L128
[ "def", "gaussian_prior_model_for_arguments", "(", "self", ",", "arguments", ")", ":", "new_model", "=", "copy", ".", "deepcopy", "(", "self", ")", "model_arguments", "=", "{", "t", ".", "name", ":", "arguments", "[", "t", ".", "prior", "]", "for", "t", "in", "self", ".", "direct_prior_tuples", "}", "for", "tuple_prior_tuple", "in", "self", ".", "tuple_prior_tuples", ":", "setattr", "(", "new_model", ",", "tuple_prior_tuple", ".", "name", ",", "tuple_prior_tuple", ".", "prior", ".", "gaussian_tuple_prior_for_arguments", "(", "arguments", ")", ")", "for", "prior_tuple", "in", "self", ".", "direct_prior_tuples", ":", "setattr", "(", "new_model", ",", "prior_tuple", ".", "name", ",", "model_arguments", "[", "prior_tuple", ".", "name", "]", ")", "for", "constant_tuple", "in", "self", ".", "constant_tuples", ":", "setattr", "(", "new_model", ",", "constant_tuple", ".", "name", ",", "constant_tuple", ".", "constant", ")", "for", "name", ",", "prior_model", "in", "self", ".", "direct_prior_model_tuples", ":", "setattr", "(", "new_model", ",", "name", ",", "prior_model", ".", "gaussian_prior_model_for_arguments", "(", "arguments", ")", ")", "return", "new_model" ]
Get one or more documents .
def get ( self , document = None , plugin = None ) : if plugin is not None : if document is None : documents_list = { } for key in self . documents . keys ( ) : if self . documents [ key ] . plugin == plugin : documents_list [ key ] = self . documents [ key ] return documents_list else : if document in self . documents . keys ( ) : if self . documents [ document ] . plugin == plugin : return self . documents [ document ] else : return None else : return None else : if document is None : return self . documents else : if document in self . documents . keys ( ) : return self . documents [ document ] else : return None
1,966
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/patterns/gw_documents_pattern.py#L130-L161
[ "def", "_get_threshold", "(", "self", ",", "graph", ",", "benchmark", ",", "entry_name", ")", ":", "if", "graph", ".", "params", ".", "get", "(", "'branch'", ")", ":", "branch_suffix", "=", "'@'", "+", "graph", ".", "params", ".", "get", "(", "'branch'", ")", "else", ":", "branch_suffix", "=", "''", "max_threshold", "=", "None", "for", "regex", ",", "threshold", "in", "six", ".", "iteritems", "(", "self", ".", "conf", ".", "regressions_thresholds", ")", ":", "if", "re", ".", "match", "(", "regex", ",", "entry_name", "+", "branch_suffix", ")", ":", "try", ":", "threshold", "=", "float", "(", "threshold", ")", "except", "ValueError", ":", "raise", "util", ".", "UserError", "(", "\"Non-float threshold in asv.conf.json: {!r}\"", ".", "format", "(", "threshold", ")", ")", "if", "max_threshold", "is", "None", ":", "max_threshold", "=", "threshold", "else", ":", "max_threshold", "=", "max", "(", "threshold", ",", "max_threshold", ")", "if", "max_threshold", "is", "None", ":", "max_threshold", "=", "0.05", "return", "max_threshold" ]
Initialises given plugins but does not activate them .
def initialise_by_names ( self , plugins = None ) : if plugins is None : plugins = [ ] self . _log . debug ( "Plugins Initialisation started" ) if not isinstance ( plugins , list ) : raise AttributeError ( "plugins must be a list, not %s" % type ( plugins ) ) self . _log . debug ( "Plugins to initialise: %s" % ", " . join ( plugins ) ) plugin_initialised = [ ] for plugin_name in plugins : if not isinstance ( plugin_name , str ) : raise AttributeError ( "plugin name must be a str, not %s" % type ( plugin_name ) ) plugin_class = self . classes . get ( plugin_name ) self . initialise ( plugin_class . clazz , plugin_name ) plugin_initialised . append ( plugin_name ) self . _log . info ( "Plugins initialised: %s" % ", " . join ( plugin_initialised ) )
1,967
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/pluginmanager.py#L46-L79
[ "def", "writearff", "(", "data", ",", "filename", ",", "relation_name", "=", "None", ",", "index", "=", "True", ")", ":", "if", "isinstance", "(", "filename", ",", "str", ")", ":", "fp", "=", "open", "(", "filename", ",", "'w'", ")", "if", "relation_name", "is", "None", ":", "relation_name", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "else", ":", "fp", "=", "filename", "if", "relation_name", "is", "None", ":", "relation_name", "=", "\"pandas\"", "try", ":", "data", "=", "_write_header", "(", "data", ",", "fp", ",", "relation_name", ",", "index", ")", "fp", ".", "write", "(", "\"\\n\"", ")", "_write_data", "(", "data", ",", "fp", ")", "finally", ":", "fp", ".", "close", "(", ")" ]
Activates given plugins .
def activate ( self , plugins = [ ] ) : self . _log . debug ( "Plugins Activation started" ) if not isinstance ( plugins , list ) : raise AttributeError ( "plugins must be a list, not %s" % type ( plugins ) ) self . _log . debug ( "Plugins to activate: %s" % ", " . join ( plugins ) ) plugins_activated = [ ] for plugin_name in plugins : if not isinstance ( plugin_name , str ) : raise AttributeError ( "plugin name must be a str, not %s" % type ( plugin_name ) ) if plugin_name not in self . _plugins . keys ( ) and plugin_name in self . classes . _classes . keys ( ) : self . _log . debug ( "Initialisation needed before activation." ) try : self . initialise_by_names ( [ plugin_name ] ) except Exception as e : self . _log . error ( "Couldn't initialise plugin %s. Reason %s" % ( plugin_name , e ) ) if self . _app . strict : error = "Couldn't initialise plugin %s" % plugin_name if sys . version_info [ 0 ] < 3 : error += "Reason: %s" % e raise_from ( Exception ( error ) , e ) else : continue if plugin_name in self . _plugins . keys ( ) : self . _log . debug ( "Activating plugin %s" % plugin_name ) if not self . _plugins [ plugin_name ] . active : try : self . _plugins [ plugin_name ] . activate ( ) except Exception as e : raise_from ( PluginNotActivatableException ( "Plugin %s could not be activated: %s" % ( plugin_name , e ) ) , e ) else : self . _log . debug ( "Plugin %s activated" % plugin_name ) plugins_activated . append ( plugin_name ) else : self . _log . warning ( "Plugin %s got already activated." % plugin_name ) if self . _app . strict : raise PluginNotInitialisableException ( ) self . _log . info ( "Plugins activated: %s" % ", " . join ( plugins_activated ) )
1,968
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/pluginmanager.py#L129-L183
[ "def", "ReleaseFileObject", "(", "self", ",", "file_object", ")", ":", "identifier", ",", "cache_value", "=", "self", ".", "_file_object_cache", ".", "GetCacheValueByObject", "(", "file_object", ")", "if", "not", "identifier", ":", "raise", "RuntimeError", "(", "'Object not cached.'", ")", "if", "not", "cache_value", ":", "raise", "RuntimeError", "(", "'Invalid cache value.'", ")", "self", ".", "_file_object_cache", ".", "ReleaseObject", "(", "identifier", ")", "result", "=", "cache_value", ".", "IsDereferenced", "(", ")", "if", "result", ":", "self", ".", "_file_object_cache", ".", "RemoveObject", "(", "identifier", ")", "return", "result" ]
Deactivates given plugins .
def deactivate ( self , plugins = [ ] ) : self . _log . debug ( "Plugins Deactivation started" ) if not isinstance ( plugins , list ) : raise AttributeError ( "plugins must be a list, not %s" % type ( plugins ) ) self . _log . debug ( "Plugins to deactivate: %s" % ", " . join ( plugins ) ) plugins_deactivated = [ ] for plugin_name in plugins : if not isinstance ( plugin_name , str ) : raise AttributeError ( "plugin name must be a str, not %s" % type ( plugin_name ) ) if plugin_name not in self . _plugins . keys ( ) : self . _log . info ( "Unknown activated plugin %s" % plugin_name ) continue else : self . _log . debug ( "Deactivating plugin %s" % plugin_name ) if not self . _plugins [ plugin_name ] . active : self . _log . warning ( "Plugin %s seems to be already deactivated" % plugin_name ) else : try : self . _plugins [ plugin_name ] . deactivate ( ) except Exception as e : raise_from ( PluginNotDeactivatableException ( "Plugin %s could not be deactivated" % plugin_name ) , e ) else : self . _log . debug ( "Plugin %s deactivated" % plugin_name ) plugins_deactivated . append ( plugin_name ) self . _log . info ( "Plugins deactivated: %s" % ", " . join ( plugins_deactivated ) )
1,969
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/pluginmanager.py#L185-L227
[ "def", "generate_citation_counter", "(", "self", ")", ":", "cite_counter", "=", "dict", "(", ")", "filename", "=", "'%s.aux'", "%", "self", ".", "project_name", "with", "open", "(", "filename", ")", "as", "fobj", ":", "main_aux", "=", "fobj", ".", "read", "(", ")", "cite_counter", "[", "filename", "]", "=", "_count_citations", "(", "filename", ")", "for", "match", "in", "re", ".", "finditer", "(", "r'\\\\@input\\{(.*.aux)\\}'", ",", "main_aux", ")", ":", "filename", "=", "match", ".", "groups", "(", ")", "[", "0", "]", "try", ":", "counter", "=", "_count_citations", "(", "filename", ")", "except", "IOError", ":", "pass", "else", ":", "cite_counter", "[", "filename", "]", "=", "counter", "return", "cite_counter" ]
Returns the plugin object with the given name . Or if a name is not given the complete plugin dictionary is returned .
def get ( self , name = None ) : if name is None : return self . _plugins else : if name not in self . _plugins . keys ( ) : return None else : return self . _plugins [ name ]
1,970
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/pluginmanager.py#L229-L243
[ "def", "_compute_slab_correction_term", "(", "self", ",", "C", ",", "rrup", ")", ":", "slab_term", "=", "C", "[", "'SSL'", "]", "*", "np", ".", "log", "(", "rrup", ")", "return", "slab_term" ]
Returns True if plugin exists and is active . If plugin does not exist it returns None
def is_active ( self , name ) : if name in self . _plugins . keys ( ) : return self . _plugins [ "name" ] . active return None
1,971
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/pluginmanager.py#L255-L265
[ "def", "align_to_mmap", "(", "num", ",", "round_up", ")", ":", "res", "=", "(", "num", "//", "ALLOCATIONGRANULARITY", ")", "*", "ALLOCATIONGRANULARITY", "if", "round_up", "and", "(", "res", "!=", "num", ")", ":", "res", "+=", "ALLOCATIONGRANULARITY", "# END handle size", "return", "res" ]
Registers new plugins .
def register ( self , classes = [ ] ) : if not isinstance ( classes , list ) : raise AttributeError ( "plugins must be a list, not %s." % type ( classes ) ) plugin_registered = [ ] for plugin_class in classes : plugin_name = plugin_class . __name__ self . register_class ( plugin_class , plugin_name ) self . _log . debug ( "Plugin %s registered" % plugin_name ) plugin_registered . append ( plugin_name ) self . _log . info ( "Plugins registered: %s" % ", " . join ( plugin_registered ) )
1,972
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/pluginmanager.py#L331-L354
[ "def", "ReleaseFileObject", "(", "self", ",", "file_object", ")", ":", "identifier", ",", "cache_value", "=", "self", ".", "_file_object_cache", ".", "GetCacheValueByObject", "(", "file_object", ")", "if", "not", "identifier", ":", "raise", "RuntimeError", "(", "'Object not cached.'", ")", "if", "not", "cache_value", ":", "raise", "RuntimeError", "(", "'Invalid cache value.'", ")", "self", ".", "_file_object_cache", ".", "ReleaseObject", "(", "identifier", ")", "result", "=", "cache_value", ".", "IsDereferenced", "(", ")", "if", "result", ":", "self", ".", "_file_object_cache", ".", "RemoveObject", "(", "identifier", ")", "return", "result" ]
Returns the plugin class object with the given name . Or if a name is not given the complete plugin dictionary is returned .
def get ( self , name = None ) : if name is None : return self . _classes else : if name not in self . _classes . keys ( ) : return None else : return self . _classes [ name ]
1,973
https://github.com/useblocks/groundwork/blob/d34fce43f54246ca4db0f7b89e450dcdc847c68c/groundwork/pluginmanager.py#L390-L404
[ "def", "setSr", "(", "self", ",", "fs", ")", ":", "self", ".", "tracePlot", ".", "setSr", "(", "fs", ")", "self", ".", "stimPlot", ".", "setSr", "(", "fs", ")" ]
Writes the . sln file to disk .
def write ( self ) : filters = { 'MSGUID' : lambda x : ( '{%s}' % x ) . upper ( ) , 'relslnfile' : lambda x : os . path . relpath ( x , os . path . dirname ( self . FileName ) ) } context = { 'sln' : self } return self . render ( self . __jinja_template__ , self . FileName , context , filters )
1,974
https://github.com/dbarsam/python-vsgen/blob/640191bb018a1ff7d7b7a4982e0d3c1a423ba878/vsgen/solution.py#L49-L60
[ "def", "delete", "(", "request", ",", "obj_id", "=", "None", ")", ":", "data", "=", "request", ".", "DELETE", "or", "json", ".", "loads", "(", "request", ".", "body", ")", "guids", "=", "data", ".", "get", "(", "'guids'", ")", ".", "split", "(", "','", ")", "objects", "=", "getObjectsFromGuids", "(", "guids", ")", "gallery", "=", "Gallery", ".", "objects", ".", "get", "(", "pk", "=", "obj_id", ")", "LOGGER", ".", "info", "(", "'{} removed {} from {}'", ".", "format", "(", "request", ".", "user", ".", "email", ",", "guids", ",", "gallery", ")", ")", "for", "o", "in", "objects", ":", "if", "isinstance", "(", "o", ",", "Image", ")", ":", "gallery", ".", "images", ".", "remove", "(", "o", ")", "elif", "isinstance", "(", "o", ",", "Video", ")", ":", "gallery", ".", "videos", ".", "remove", "(", "o", ")", "res", "=", "Result", "(", ")", "return", "JsonResponse", "(", "res", ".", "asDict", "(", ")", ")" ]
Loads cell annotations .
def load_annotations ( self , aname , sep = ',' ) : ann = pd . read_csv ( aname ) cell_names = np . array ( list ( self . adata . obs_names ) ) all_cell_names = np . array ( list ( self . adata_raw . obs_names ) ) if ( ann . shape [ 1 ] > 1 ) : ann = pd . read_csv ( aname , index_col = 0 , sep = sep ) if ( ann . shape [ 0 ] != all_cell_names . size ) : ann = pd . read_csv ( aname , index_col = 0 , header = None , sep = sep ) else : if ( ann . shape [ 0 ] != all_cell_names . size ) : ann = pd . read_csv ( aname , header = None , sep = sep ) ann . index = np . array ( list ( ann . index . astype ( '<U100' ) ) ) ann1 = np . array ( list ( ann . T [ cell_names ] . T . values . flatten ( ) ) ) ann2 = np . array ( list ( ann . values . flatten ( ) ) ) self . adata_raw . obs [ 'annotations' ] = pd . Categorical ( ann2 ) self . adata . obs [ 'annotations' ] = pd . Categorical ( ann1 )
1,975
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L460-L489
[ "def", "density_hub", "(", "self", ",", "weather_df", ")", ":", "if", "self", ".", "density_model", "!=", "'interpolation_extrapolation'", ":", "temperature_hub", "=", "self", ".", "temperature_hub", "(", "weather_df", ")", "# Calculation of density in kg/m³ at hub height", "if", "self", ".", "density_model", "==", "'barometric'", ":", "logging", ".", "debug", "(", "'Calculating density using barometric height '", "'equation.'", ")", "closest_height", "=", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "min", "(", "range", "(", "len", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", ")", ")", ",", "key", "=", "lambda", "i", ":", "abs", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "i", "]", "-", "self", ".", "power_plant", ".", "hub_height", ")", ")", "]", "density_hub", "=", "density", ".", "barometric", "(", "weather_df", "[", "'pressure'", "]", "[", "closest_height", "]", ",", "closest_height", ",", "self", ".", "power_plant", ".", "hub_height", ",", "temperature_hub", ")", "elif", "self", ".", "density_model", "==", "'ideal_gas'", ":", "logging", ".", "debug", "(", "'Calculating density using ideal gas equation.'", ")", "closest_height", "=", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "min", "(", "range", "(", "len", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", ")", ")", ",", "key", "=", "lambda", "i", ":", "abs", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "i", "]", "-", "self", ".", "power_plant", ".", "hub_height", ")", ")", "]", "density_hub", "=", "density", ".", "ideal_gas", "(", "weather_df", "[", "'pressure'", "]", "[", "closest_height", "]", ",", "closest_height", ",", "self", ".", "power_plant", ".", "hub_height", ",", "temperature_hub", ")", "elif", "self", ".", "density_model", "==", "'interpolation_extrapolation'", ":", "logging", ".", "debug", "(", "'Calculating density using linear inter- or '", "'extrapolation.'", ")", "density_hub", "=", "tools", ".", 
"linear_interpolation_extrapolation", "(", "weather_df", "[", "'density'", "]", ",", "self", ".", "power_plant", ".", "hub_height", ")", "else", ":", "raise", "ValueError", "(", "\"'{0}' is an invalid value. \"", ".", "format", "(", "self", ".", "density_model", ")", "+", "\"`density_model` \"", "+", "\"must be 'barometric', 'ideal_gas' or \"", "+", "\"'interpolation_extrapolation'.\"", ")", "return", "density_hub" ]
Computes the spatial dispersion factors for each gene .
def dispersion_ranking_NN ( self , nnm , num_norm_avg = 50 ) : self . knn_avg ( nnm ) D_avg = self . adata . layers [ 'X_knn_avg' ] mu , var = sf . mean_variance_axis ( D_avg , axis = 0 ) dispersions = np . zeros ( var . size ) dispersions [ mu > 0 ] = var [ mu > 0 ] / mu [ mu > 0 ] self . adata . var [ 'spatial_dispersions' ] = dispersions . copy ( ) ma = np . sort ( dispersions ) [ - num_norm_avg : ] . mean ( ) dispersions [ dispersions >= ma ] = ma weights = ( ( dispersions / dispersions . max ( ) ) ** 0.5 ) . flatten ( ) self . adata . var [ 'weights' ] = weights return weights
1,976
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L491-L532
[ "def", "json_engine", "(", "self", ",", "req", ")", ":", "# pylint: disable=R0201,W0613", "try", ":", "return", "stats", ".", "engine_data", "(", "config", ".", "engine", ")", "except", "(", "error", ".", "LoggableError", ",", "xmlrpc", ".", "ERRORS", ")", "as", "torrent_exc", ":", "raise", "exc", ".", "HTTPInternalServerError", "(", "str", "(", "torrent_exc", ")", ")" ]
Plots orthogonal expression patterns .
def plot_correlated_groups ( self , group = None , n_genes = 5 , * * kwargs ) : geneID_groups = self . adata . uns [ 'gene_groups' ] if ( group is None ) : for i in range ( len ( geneID_groups ) ) : self . show_gene_expression ( geneID_groups [ i ] [ 0 ] , * * kwargs ) else : for i in range ( n_genes ) : self . show_gene_expression ( geneID_groups [ group ] [ i ] , * * kwargs )
1,977
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L857-L885
[ "def", "_fetch_cdn_data", "(", "self", ")", ":", "if", "self", ".", "_cdn_enabled", "is", "FAULT", ":", "headers", "=", "self", ".", "manager", ".", "fetch_cdn_data", "(", "self", ")", "else", ":", "headers", "=", "{", "}", "# Set defaults in case not all headers are present.", "self", ".", "_set_cdn_defaults", "(", ")", "if", "not", "headers", ":", "# Not CDN enabled; return", "return", "else", ":", "self", ".", "_cdn_enabled", "=", "True", "for", "key", ",", "value", "in", "headers", ".", "items", "(", ")", ":", "low_key", "=", "key", ".", "lower", "(", ")", "if", "low_key", "==", "\"x-cdn-uri\"", ":", "self", ".", "_cdn_uri", "=", "value", "elif", "low_key", "==", "\"x-ttl\"", ":", "self", ".", "_cdn_ttl", "=", "int", "(", "value", ")", "elif", "low_key", "==", "\"x-cdn-ssl-uri\"", ":", "self", ".", "_cdn_ssl_uri", "=", "value", "elif", "low_key", "==", "\"x-cdn-streaming-uri\"", ":", "self", ".", "_cdn_streaming_uri", "=", "value", "elif", "low_key", "==", "\"x-cdn-ios-uri\"", ":", "self", ".", "_cdn_ios_uri", "=", "value", "elif", "low_key", "==", "\"x-log-retention\"", ":", "self", ".", "_cdn_log_retention", "=", "(", "value", "==", "\"True\"", ")" ]
Plots gene expression patterns correlated with the input gene .
def plot_correlated_genes ( self , name , n_genes = 5 , number_of_features = 1000 , * * kwargs ) : all_gene_names = np . array ( list ( self . adata . var_names ) ) if ( ( all_gene_names == name ) . sum ( ) == 0 ) : print ( "Gene not found in the filtered dataset. Note that genes " "are case sensitive." ) return sds = self . corr_bin_genes ( input_gene = name , number_of_features = number_of_features ) if ( n_genes + 1 > sds . size ) : x = sds . size else : x = n_genes + 1 for i in range ( 1 , x ) : self . show_gene_expression ( sds [ i ] , * * kwargs ) return sds [ 1 : ]
1,978
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L887-L924
[ "def", "_configure_manager", "(", "self", ")", ":", "self", ".", "_manager", "=", "CloudBlockStorageManager", "(", "self", ",", "resource_class", "=", "CloudBlockStorageVolume", ",", "response_key", "=", "\"volume\"", ",", "uri_base", "=", "\"volumes\"", ")", "self", ".", "_types_manager", "=", "BaseManager", "(", "self", ",", "resource_class", "=", "CloudBlockStorageVolumeType", ",", "response_key", "=", "\"volume_type\"", ",", "uri_base", "=", "\"types\"", ")", "self", ".", "_snapshot_manager", "=", "CloudBlockStorageSnapshotManager", "(", "self", ",", "resource_class", "=", "CloudBlockStorageSnapshot", ",", "response_key", "=", "\"snapshot\"", ",", "uri_base", "=", "\"snapshots\"", ")" ]
Wrapper for sklearn s t - SNE implementation .
def run_tsne ( self , X = None , metric = 'correlation' , * * kwargs ) : if ( X is not None ) : dt = man . TSNE ( metric = metric , * * kwargs ) . fit_transform ( X ) return dt else : dt = man . TSNE ( metric = self . distance , * * kwargs ) . fit_transform ( self . adata . obsm [ 'X_pca' ] ) tsne2d = dt self . adata . obsm [ 'X_tsne' ] = tsne2d
1,979
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L1011-L1026
[ "def", "_SeparateTypes", "(", "self", ",", "metadata_value_pairs", ")", ":", "registry_pairs", "=", "[", "]", "file_pairs", "=", "[", "]", "match_pairs", "=", "[", "]", "for", "metadata", ",", "result", "in", "metadata_value_pairs", ":", "if", "(", "result", ".", "stat_entry", ".", "pathspec", ".", "pathtype", "==", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", ":", "registry_pairs", ".", "append", "(", "(", "metadata", ",", "result", ".", "stat_entry", ")", ")", "else", ":", "file_pairs", ".", "append", "(", "(", "metadata", ",", "result", ")", ")", "match_pairs", ".", "extend", "(", "[", "(", "metadata", ",", "match", ")", "for", "match", "in", "result", ".", "matches", "]", ")", "return", "registry_pairs", ",", "file_pairs", ",", "match_pairs" ]
Wrapper for umap - learn .
def run_umap ( self , X = None , metric = None , * * kwargs ) : import umap as umap if metric is None : metric = self . distance if ( X is not None ) : umap_obj = umap . UMAP ( metric = metric , * * kwargs ) dt = umap_obj . fit_transform ( X ) return dt else : umap_obj = umap . UMAP ( metric = metric , * * kwargs ) umap2d = umap_obj . fit_transform ( self . adata . obsm [ 'X_pca' ] ) self . adata . obsm [ 'X_umap' ] = umap2d
1,980
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L1028-L1048
[ "def", "parse_atom_site", "(", "self", ",", "name", ",", "attributes", ")", ":", "if", "name", "==", "\"PDBx:pdbx_PDB_ins_code\"", ":", "assert", "(", "not", "(", "self", ".", "current_atom_site", ".", "ATOMResidueiCodeIsNull", ")", ")", "if", "attributes", ".", "get", "(", "'xsi:nil'", ")", "==", "'true'", ":", "self", ".", "current_atom_site", ".", "ATOMResidueiCodeIsNull", "=", "True", "if", "name", "==", "\"PDBx:auth_asym_id\"", ":", "assert", "(", "not", "(", "self", ".", "current_atom_site", ".", "PDBChainIDIsNull", ")", ")", "if", "attributes", ".", "get", "(", "'xsi:nil'", ")", "==", "'true'", ":", "self", ".", "current_atom_site", ".", "PDBChainIDIsNull", "=", "True" ]
Display a scatter plot .
def scatter ( self , projection = None , c = None , cmap = 'rainbow' , linewidth = 0.0 , edgecolor = 'k' , axes = None , colorbar = True , s = 10 , * * kwargs ) : if ( not PLOTTING ) : print ( "matplotlib not installed!" ) else : if ( isinstance ( projection , str ) ) : try : dt = self . adata . obsm [ projection ] except KeyError : print ( 'Please create a projection first using run_umap or' 'run_tsne' ) elif ( projection is None ) : try : dt = self . adata . obsm [ 'X_umap' ] except KeyError : try : dt = self . adata . obsm [ 'X_tsne' ] except KeyError : print ( "Please create either a t-SNE or UMAP projection" "first." ) return else : dt = projection if ( axes is None ) : plt . figure ( ) axes = plt . gca ( ) if ( c is None ) : plt . scatter ( dt [ : , 0 ] , dt [ : , 1 ] , s = s , linewidth = linewidth , edgecolor = edgecolor , * * kwargs ) else : if isinstance ( c , str ) : try : c = self . adata . obs [ c ] . get_values ( ) except KeyError : 0 # do nothing if ( ( isinstance ( c [ 0 ] , str ) or isinstance ( c [ 0 ] , np . str_ ) ) and ( isinstance ( c , np . ndarray ) or isinstance ( c , list ) ) ) : i = ut . convert_annotations ( c ) ui , ai = np . unique ( i , return_index = True ) cax = axes . scatter ( dt [ : , 0 ] , dt [ : , 1 ] , c = i , cmap = cmap , s = s , linewidth = linewidth , edgecolor = edgecolor , * * kwargs ) if ( colorbar ) : cbar = plt . colorbar ( cax , ax = axes , ticks = ui ) cbar . ax . set_yticklabels ( c [ ai ] ) else : if not ( isinstance ( c , np . ndarray ) or isinstance ( c , list ) ) : colorbar = False i = c cax = axes . scatter ( dt [ : , 0 ] , dt [ : , 1 ] , c = i , cmap = cmap , s = s , linewidth = linewidth , edgecolor = edgecolor , * * kwargs ) if ( colorbar ) : plt . colorbar ( cax , ax = axes )
1,981
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L1077-L1175
[ "def", "_set_vibration_nix", "(", "self", ",", "left_motor", ",", "right_motor", ",", "duration", ")", ":", "code", "=", "self", ".", "__get_vibration_code", "(", "left_motor", ",", "right_motor", ",", "duration", ")", "secs", ",", "msecs", "=", "convert_timeval", "(", "time", ".", "time", "(", ")", ")", "outer_event", "=", "struct", ".", "pack", "(", "EVENT_FORMAT", ",", "secs", ",", "msecs", ",", "0x15", ",", "code", ",", "1", ")", "self", ".", "_write_device", ".", "write", "(", "outer_event", ")", "self", ".", "_write_device", ".", "flush", "(", ")" ]
Display a gene s expressions .
def show_gene_expression ( self , gene , avg = True , axes = None , * * kwargs ) : all_gene_names = np . array ( list ( self . adata . var_names ) ) cell_names = np . array ( list ( self . adata . obs_names ) ) all_cell_names = np . array ( list ( self . adata_raw . obs_names ) ) idx = np . where ( all_gene_names == gene ) [ 0 ] name = gene if ( idx . size == 0 ) : print ( "Gene note found in the filtered dataset. Note that genes " "are case sensitive." ) return if ( avg ) : a = self . adata . layers [ 'X_knn_avg' ] [ : , idx ] . toarray ( ) . flatten ( ) if a . sum ( ) == 0 : a = np . log2 ( self . adata_raw . X [ np . in1d ( all_cell_names , cell_names ) , : ] [ : , idx ] . toarray ( ) . flatten ( ) + 1 ) else : a = np . log2 ( self . adata_raw . X [ np . in1d ( all_cell_names , cell_names ) , : ] [ : , idx ] . toarray ( ) . flatten ( ) + 1 ) if axes is None : plt . figure ( ) axes = plt . gca ( ) self . scatter ( c = a , axes = axes , * * kwargs ) axes . set_title ( name )
1,982
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L1177-L1231
[ "def", "merge_hooks", "(", "request_hooks", ",", "session_hooks", ",", "dict_class", "=", "OrderedDict", ")", ":", "if", "session_hooks", "is", "None", "or", "session_hooks", ".", "get", "(", "'response'", ")", "==", "[", "]", ":", "return", "request_hooks", "if", "request_hooks", "is", "None", "or", "request_hooks", ".", "get", "(", "'response'", ")", "==", "[", "]", ":", "return", "session_hooks", "return", "merge_setting", "(", "request_hooks", ",", "session_hooks", ",", "dict_class", ")" ]
Runs Louvain clustering using the vtraag implementation . Assumes that louvain optional dependency is installed .
def louvain_clustering ( self , X = None , res = 1 , method = 'modularity' ) : if X is None : X = self . adata . uns [ 'neighbors' ] [ 'connectivities' ] save = True else : if not sp . isspmatrix_csr ( X ) : X = sp . csr_matrix ( X ) save = False import igraph as ig import louvain adjacency = sparse_knn ( X . dot ( X . T ) / self . k , self . k ) . tocsr ( ) sources , targets = adjacency . nonzero ( ) weights = adjacency [ sources , targets ] if isinstance ( weights , np . matrix ) : weights = weights . A1 g = ig . Graph ( directed = True ) g . add_vertices ( adjacency . shape [ 0 ] ) g . add_edges ( list ( zip ( sources , targets ) ) ) try : g . es [ 'weight' ] = weights except BaseException : pass if method == 'significance' : cl = louvain . find_partition ( g , louvain . SignificanceVertexPartition ) else : cl = louvain . find_partition ( g , louvain . RBConfigurationVertexPartition , resolution_parameter = res ) if save : self . adata . obs [ 'louvain_clusters' ] = pd . Categorical ( np . array ( cl . membership ) ) else : return np . array ( cl . membership )
1,983
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L1265-L1316
[ "def", "SetConsoleTextAttribute", "(", "stream_id", ",", "attrs", ")", ":", "handle", "=", "handles", "[", "stream_id", "]", "return", "windll", ".", "kernel32", ".", "SetConsoleTextAttribute", "(", "handle", ",", "attrs", ")" ]
Performs k - means clustering .
def kmeans_clustering ( self , numc , X = None , npcs = 15 ) : from sklearn . cluster import KMeans if X is None : D_sub = self . adata . uns [ 'X_processed' ] X = ( D_sub - D_sub . mean ( 0 ) ) . dot ( self . adata . uns [ 'pca_obj' ] . components_ [ : npcs , : ] . T ) save = True else : save = False cl = KMeans ( n_clusters = numc ) . fit_predict ( Normalizer ( ) . fit_transform ( X ) ) if save : self . adata . obs [ 'kmeans_clusters' ] = pd . Categorical ( cl ) else : return cl
1,984
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L1318-L1350
[ "def", "post", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "try", ":", "kwargs", "=", "self", ".", "load_object", "(", "kwargs", ")", "except", "Exception", "as", "e", ":", "return", "self", ".", "render_te_response", "(", "{", "'title'", ":", "str", "(", "e", ")", ",", "}", ")", "if", "not", "self", ".", "has_permission", "(", "request", ")", ":", "return", "self", ".", "render_te_response", "(", "{", "'title'", ":", "'No access'", ",", "}", ")", "return", "self", ".", "render_te_response", "(", "self", ".", "handle_dialog", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")" ]
Ranks marker genes for each cluster using a random forest classification approach .
def identify_marker_genes_rf ( self , labels = None , clusters = None , n_genes = 4000 ) : if ( labels is None ) : try : keys = np . array ( list ( self . adata . obs_keys ( ) ) ) lbls = self . adata . obs [ ut . search_string ( keys , '_clusters' ) [ 0 ] [ 0 ] ] . get_values ( ) except KeyError : print ( "Please generate cluster labels first or set the " "'labels' keyword argument." ) return elif isinstance ( labels , str ) : lbls = self . adata . obs [ labels ] . get_values ( ) . flatten ( ) else : lbls = labels from sklearn . ensemble import RandomForestClassifier markers = { } if clusters == None : lblsu = np . unique ( lbls ) else : lblsu = np . unique ( clusters ) indices = np . argsort ( - self . adata . var [ 'weights' ] . values ) X = self . adata . layers [ 'X_disp' ] [ : , indices [ : n_genes ] ] . toarray ( ) for K in range ( lblsu . size ) : print ( K ) y = np . zeros ( lbls . size ) y [ lbls == lblsu [ K ] ] = 1 clf = RandomForestClassifier ( n_estimators = 100 , max_depth = None , random_state = 0 ) clf . fit ( X , y ) idx = np . argsort ( - clf . feature_importances_ ) markers [ lblsu [ K ] ] = self . adata . uns [ 'ranked_genes' ] [ idx ] if clusters is None : self . adata . uns [ 'marker_genes_rf' ] = markers return markers
1,985
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L1404-L1471
[ "def", "_count_devices", "(", "self", ")", ":", "number_of_devices", "=", "ctypes", ".", "c_uint", "(", ")", "if", "ctypes", ".", "windll", ".", "user32", ".", "GetRawInputDeviceList", "(", "ctypes", ".", "POINTER", "(", "ctypes", ".", "c_int", ")", "(", ")", ",", "ctypes", ".", "byref", "(", "number_of_devices", ")", ",", "ctypes", ".", "sizeof", "(", "RawInputDeviceList", ")", ")", "==", "-", "1", ":", "warn", "(", "\"Call to GetRawInputDeviceList was unsuccessful.\"", "\"We have no idea if a mouse or keyboard is attached.\"", ",", "RuntimeWarning", ")", "return", "devices_found", "=", "(", "RawInputDeviceList", "*", "number_of_devices", ".", "value", ")", "(", ")", "if", "ctypes", ".", "windll", ".", "user32", ".", "GetRawInputDeviceList", "(", "devices_found", ",", "ctypes", ".", "byref", "(", "number_of_devices", ")", ",", "ctypes", ".", "sizeof", "(", "RawInputDeviceList", ")", ")", "==", "-", "1", ":", "warn", "(", "\"Call to GetRawInputDeviceList was unsuccessful.\"", "\"We have no idea if a mouse or keyboard is attached.\"", ",", "RuntimeWarning", ")", "return", "for", "device", "in", "devices_found", ":", "if", "device", ".", "dwType", "==", "0", ":", "self", ".", "_raw_device_counts", "[", "'mice'", "]", "+=", "1", "elif", "device", ".", "dwType", "==", "1", ":", "self", ".", "_raw_device_counts", "[", "'keyboards'", "]", "+=", "1", "elif", "device", ".", "dwType", "==", "2", ":", "self", ".", "_raw_device_counts", "[", "'otherhid'", "]", "+=", "1", "else", ":", "self", ".", "_raw_device_counts", "[", "'unknown'", "]", "+=", "1" ]
Ranking marker genes based on their respective magnitudes in the correlation dot products with cluster - specific reference expression profiles .
def identify_marker_genes_corr ( self , labels = None , n_genes = 4000 ) : if ( labels is None ) : try : keys = np . array ( list ( self . adata . obs_keys ( ) ) ) lbls = self . adata . obs [ ut . search_string ( keys , '_clusters' ) [ 0 ] [ 0 ] ] . get_values ( ) except KeyError : print ( "Please generate cluster labels first or set the " "'labels' keyword argument." ) return elif isinstance ( labels , str ) : lbls = self . adata . obs [ labels ] . get_values ( ) . flatten ( ) else : lbls = labels w = self . adata . var [ 'weights' ] . values s = StandardScaler ( ) idxg = np . argsort ( - w ) [ : n_genes ] y1 = s . fit_transform ( self . adata . layers [ 'X_disp' ] [ : , idxg ] . A ) * w [ idxg ] all_gene_names = np . array ( list ( self . adata . var_names ) ) [ idxg ] markers = { } lblsu = np . unique ( lbls ) for i in lblsu : Gcells = np . array ( list ( self . adata . obs_names [ lbls == i ] ) ) z1 = y1 [ np . in1d ( self . adata . obs_names , Gcells ) , : ] m1 = ( z1 - z1 . mean ( 1 ) [ : , None ] ) / z1 . std ( 1 ) [ : , None ] ref = z1 . mean ( 0 ) ref = ( ref - ref . mean ( ) ) / ref . std ( ) g2 = ( m1 * ref ) . mean ( 0 ) markers [ i ] = all_gene_names [ np . argsort ( - g2 ) ] self . adata . uns [ 'marker_genes_corr' ] = markers return markers
1,986
https://github.com/atarashansky/self-assembling-manifold/blob/4db4793f65af62047492327716932ba81a67f679/SAM.py#L1521-L1575
[ "def", "GetDateRange", "(", "self", ")", ":", "start", "=", "self", ".", "start_date", "end", "=", "self", ".", "end_date", "for", "date", ",", "(", "exception_type", ",", "_", ")", "in", "self", ".", "date_exceptions", ".", "items", "(", ")", ":", "if", "exception_type", "==", "self", ".", "_EXCEPTION_TYPE_REMOVE", ":", "continue", "if", "not", "start", "or", "(", "date", "<", "start", ")", ":", "start", "=", "date", "if", "not", "end", "or", "(", "date", ">", "end", ")", ":", "end", "=", "date", "if", "start", "is", "None", ":", "start", "=", "end", "elif", "end", "is", "None", ":", "end", "=", "start", "# If start and end are None we did a little harmless shuffling", "return", "(", "start", ",", "end", ")" ]
Add ability are allowed using two arguments .
def add ( self , action = None , subject = None , * * conditions ) : self . add_rule ( Rule ( True , action , subject , * * conditions ) )
1,987
https://github.com/cocoakekeyu/cancan/blob/f198d560e6e008e6c5580ba55581a939a5d544ed/cancan/ability.py#L21-L35
[ "def", "metadata", "(", "self", ")", ":", "sheet", "=", "self", ".", "result", ".", "add_sheet", "(", "\"metadata\"", ")", "self", ".", "header", "(", "sheet", ",", "\"metadata\"", ")", "n_row", "=", "1", "# row number", "for", "k", "in", "self", ".", "po", ".", "metadata", ":", "row", "=", "sheet", ".", "row", "(", "n_row", ")", "row", ".", "write", "(", "0", ",", "k", ")", "row", ".", "write", "(", "1", ",", "self", ".", "po", ".", "metadata", "[", "k", "]", ")", "n_row", "+=", "1", "sheet", ".", "flush_row_data", "(", ")" ]
Defines an ability which cannot be done .
def addnot ( self , action = None , subject = None , * * conditions ) : self . add_rule ( Rule ( False , action , subject , * * conditions ) )
1,988
https://github.com/cocoakekeyu/cancan/blob/f198d560e6e008e6c5580ba55581a939a5d544ed/cancan/ability.py#L37-L41
[ "def", "compose", "(", "list_of_files", ",", "destination_file", ",", "files_metadata", "=", "None", ",", "content_type", "=", "None", ",", "retry_params", "=", "None", ",", "_account_id", "=", "None", ")", ":", "api", "=", "storage_api", ".", "_get_storage_api", "(", "retry_params", "=", "retry_params", ",", "account_id", "=", "_account_id", ")", "if", "os", ".", "getenv", "(", "'SERVER_SOFTWARE'", ")", ".", "startswith", "(", "'Dev'", ")", ":", "def", "_temp_func", "(", "file_list", ",", "destination_file", ",", "content_type", ")", ":", "bucket", "=", "'/'", "+", "destination_file", ".", "split", "(", "'/'", ")", "[", "1", "]", "+", "'/'", "with", "open", "(", "destination_file", ",", "'w'", ",", "content_type", "=", "content_type", ")", "as", "gcs_merge", ":", "for", "source_file", "in", "file_list", ":", "with", "open", "(", "bucket", "+", "source_file", "[", "'Name'", "]", ",", "'r'", ")", "as", "gcs_source", ":", "gcs_merge", ".", "write", "(", "gcs_source", ".", "read", "(", ")", ")", "compose_object", "=", "_temp_func", "else", ":", "compose_object", "=", "api", ".", "compose_object", "file_list", ",", "_", "=", "_validate_compose_list", "(", "destination_file", ",", "list_of_files", ",", "files_metadata", ",", "32", ")", "compose_object", "(", "file_list", ",", "destination_file", ",", "content_type", ")" ]
Check if the user has permission to perform a given action on an object
def can ( self , action , subject , * * conditions ) : for rule in self . relevant_rules_for_match ( action , subject ) : if rule . matches_conditions ( action , subject , * * conditions ) : return rule . base_behavior return False
1,989
https://github.com/cocoakekeyu/cancan/blob/f198d560e6e008e6c5580ba55581a939a5d544ed/cancan/ability.py#L46-L53
[ "def", "send_zipfile", "(", "request", ",", "fileList", ")", ":", "temp", "=", "tempfile", ".", "TemporaryFile", "(", ")", "archive", "=", "zipfile", ".", "ZipFile", "(", "temp", ",", "'w'", ",", "zipfile", ".", "ZIP_DEFLATED", ")", "for", "artist", ",", "files", "in", "fileList", ".", "iteritems", "(", ")", ":", "for", "f", "in", "files", ":", "archive", ".", "write", "(", "f", "[", "0", "]", ",", "'%s/%s'", "%", "(", "artist", ",", "f", "[", "1", "]", ")", ")", "archive", ".", "close", "(", ")", "wrapper", "=", "FixedFileWrapper", "(", "temp", ")", "response", "=", "HttpResponse", "(", "wrapper", ",", "content_type", "=", "'application/zip'", ")", "response", "[", "'Content-Disposition'", "]", "=", "'attachment; filename=FrogSources.zip'", "response", "[", "'Content-Length'", "]", "=", "temp", ".", "tell", "(", ")", "temp", ".", "seek", "(", "0", ")", "return", "response" ]
retrive match action and subject
def relevant_rules_for_match ( self , action , subject ) : matches = [ ] for rule in self . rules : rule . expanded_actions = self . expand_actions ( rule . actions ) if rule . is_relevant ( action , subject ) : matches . append ( rule ) return self . optimize ( matches [ : : - 1 ] )
1,990
https://github.com/cocoakekeyu/cancan/blob/f198d560e6e008e6c5580ba55581a939a5d544ed/cancan/ability.py#L62-L70
[ "def", "dump", "(", "self", ",", "filename", "=", "None", ",", "compressed", "=", "True", ",", "pretty", "=", "True", ")", ":", "content", "=", "self", ".", "server", ".", "jsonrpc", ".", "dumpWindowHierarchy", "(", "compressed", ",", "None", ")", "if", "filename", ":", "with", "open", "(", "filename", ",", "\"wb\"", ")", "as", "f", ":", "f", ".", "write", "(", "content", ".", "encode", "(", "\"utf-8\"", ")", ")", "if", "pretty", "and", "\"\\n \"", "not", "in", "content", ":", "xml_text", "=", "xml", ".", "dom", ".", "minidom", ".", "parseString", "(", "content", ".", "encode", "(", "\"utf-8\"", ")", ")", "content", "=", "U", "(", "xml_text", ".", "toprettyxml", "(", "indent", "=", "' '", ")", ")", "return", "content" ]
Accepts an array of actions and returns an array of actions which match
def expand_actions ( self , actions ) : r = [ ] for action in actions : r . append ( action ) if action in self . aliased_actions : r . extend ( self . aliased_actions [ action ] ) return r
1,991
https://github.com/cocoakekeyu/cancan/blob/f198d560e6e008e6c5580ba55581a939a5d544ed/cancan/ability.py#L81-L90
[ "def", "get_handle", "(", ")", ":", "global", "__handle__", "if", "not", "__handle__", ":", "__handle__", "=", "FT_Library", "(", ")", "error", "=", "FT_Init_FreeType", "(", "byref", "(", "__handle__", ")", ")", "if", "error", ":", "raise", "RuntimeError", "(", "hex", "(", "error", ")", ")", "return", "__handle__" ]
Alias one or more actions into another one .
def alias_action ( self , * args , * * kwargs ) : to = kwargs . pop ( 'to' , None ) if not to : return error_message = ( "You can't specify target ({}) as alias " "because it is real action name" . format ( to ) ) if to in list ( itertools . chain ( * self . aliased_actions . values ( ) ) ) : raise Exception ( error_message ) self . aliased_actions . setdefault ( to , [ ] ) . extend ( args )
1,992
https://github.com/cocoakekeyu/cancan/blob/f198d560e6e008e6c5580ba55581a939a5d544ed/cancan/ability.py#L92-L108
[ "def", "get_cdn_metadata", "(", "self", ",", "container", ")", ":", "uri", "=", "\"%s/%s\"", "%", "(", "self", ".", "uri_base", ",", "utils", ".", "get_name", "(", "container", ")", ")", "resp", ",", "resp_body", "=", "self", ".", "api", ".", "cdn_request", "(", "uri", ",", "\"HEAD\"", ")", "ret", "=", "dict", "(", "resp", ".", "headers", ")", "# Remove non-CDN headers", "ret", ".", "pop", "(", "\"content-length\"", ",", "None", ")", "ret", ".", "pop", "(", "\"content-type\"", ",", "None", ")", "ret", ".", "pop", "(", "\"date\"", ",", "None", ")", "return", "ret" ]
Convenience wrapper for database SELECT and fetch all .
def fetch ( table , cols = "*" , where = ( ) , group = "" , order = ( ) , limit = ( ) , * * kwargs ) : return select ( table , cols , where , group , order , limit , * * kwargs ) . fetchall ( )
1,993
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/db.py#L24-L26
[ "def", "_run_lint_on_file_stamped", "(", "*", "args", ")", ":", "# We pass an empty dictionary as keyword arguments here to work", "# around a bug in frosted, which crashes when no keyword arguments", "# are passed", "#", "# suppress(E204)", "stamp_args", ",", "stamp_kwargs", "=", "_run_lint_on_file_stamped_args", "(", "*", "args", ",", "*", "*", "{", "}", ")", "return", "jobstamp", ".", "run", "(", "_run_lint_on_file_exceptions", ",", "*", "stamp_args", ",", "*", "*", "stamp_kwargs", ")" ]
Convenience wrapper for database SELECT and fetch one .
def fetchone ( table , cols = "*" , where = ( ) , group = "" , order = ( ) , limit = ( ) , * * kwargs ) : return select ( table , cols , where , group , order , limit , * * kwargs ) . fetchone ( )
1,994
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/db.py#L29-L31
[ "def", "metricCompute", "(", "self", ",", "sensorToBody", ",", "bodyToSpecificObject", ")", ":", "overlaps", "=", "self", ".", "metricConnections", ".", "computeActivity", "(", "{", "\"bodyToSpecificObject\"", ":", "bodyToSpecificObject", ",", "\"sensorToBody\"", ":", "sensorToBody", ",", "}", ")", "self", ".", "activeMetricSegments", "=", "np", ".", "where", "(", "overlaps", ">=", "2", ")", "[", "0", "]", "self", ".", "activeCells", "=", "np", ".", "unique", "(", "self", ".", "metricConnections", ".", "mapSegmentsToCells", "(", "self", ".", "activeMetricSegments", ")", ")" ]
Convenience wrapper for database INSERT .
def insert ( table , values = ( ) , * * kwargs ) : values = dict ( values , * * kwargs ) . items ( ) sql , args = makeSQL ( "INSERT" , table , values = values ) return execute ( sql , args ) . lastrowid
1,995
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/db.py#L34-L38
[ "def", "enable", "(", "self", ")", ":", "if", "not", "CrashReporter", ".", "active", ":", "CrashReporter", ".", "active", "=", "True", "# Store this function so we can set it back if the CrashReporter is deactivated", "self", ".", "_excepthook", "=", "sys", ".", "excepthook", "sys", ".", "excepthook", "=", "self", ".", "exception_handler", "self", ".", "logger", ".", "info", "(", "'CrashReporter: Enabled'", ")", "if", "self", ".", "report_dir", ":", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "report_dir", ")", ":", "if", "self", ".", "get_offline_reports", "(", ")", ":", "# First attempt to send the reports, if that fails then start the watcher", "self", ".", "submit_offline_reports", "(", ")", "remaining_reports", "=", "len", "(", "self", ".", "get_offline_reports", "(", ")", ")", "if", "remaining_reports", "and", "self", ".", "watcher_enabled", ":", "self", ".", "start_watcher", "(", ")", "else", ":", "os", ".", "makedirs", "(", "self", ".", "report_dir", ")" ]
Convenience wrapper for database SELECT .
def select ( table , cols = "*" , where = ( ) , group = "" , order = ( ) , limit = ( ) , * * kwargs ) : where = dict ( where , * * kwargs ) . items ( ) sql , args = makeSQL ( "SELECT" , table , cols , where , group , order , limit ) return execute ( sql , args )
1,996
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/db.py#L41-L45
[ "def", "_run_lint_on_file_stamped", "(", "*", "args", ")", ":", "# We pass an empty dictionary as keyword arguments here to work", "# around a bug in frosted, which crashes when no keyword arguments", "# are passed", "#", "# suppress(E204)", "stamp_args", ",", "stamp_kwargs", "=", "_run_lint_on_file_stamped_args", "(", "*", "args", ",", "*", "*", "{", "}", ")", "return", "jobstamp", ".", "run", "(", "_run_lint_on_file_exceptions", ",", "*", "stamp_args", ",", "*", "*", "stamp_kwargs", ")" ]
Convenience wrapper for database UPDATE .
def update ( table , values , where = ( ) , * * kwargs ) : where = dict ( where , * * kwargs ) . items ( ) sql , args = makeSQL ( "UPDATE" , table , values = values , where = where ) return execute ( sql , args ) . rowcount
1,997
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/db.py#L48-L52
[ "def", "by_vol_id", "(", "blocks", ",", "slist", "=", "None", ")", ":", "vol_blocks", "=", "{", "}", "# sort block by volume", "# not reliable with multiple partitions (fifo)", "for", "i", "in", "blocks", ":", "if", "slist", "and", "i", "not", "in", "slist", ":", "continue", "elif", "not", "blocks", "[", "i", "]", ".", "is_valid", ":", "continue", "if", "blocks", "[", "i", "]", ".", "vid_hdr", ".", "vol_id", "not", "in", "vol_blocks", ":", "vol_blocks", "[", "blocks", "[", "i", "]", ".", "vid_hdr", ".", "vol_id", "]", "=", "[", "]", "vol_blocks", "[", "blocks", "[", "i", "]", ".", "vid_hdr", ".", "vol_id", "]", ".", "append", "(", "blocks", "[", "i", "]", ".", "peb_num", ")", "return", "vol_blocks" ]
Convenience wrapper for database DELETE .
def delete ( table , where = ( ) , * * kwargs ) : where = dict ( where , * * kwargs ) . items ( ) sql , args = makeSQL ( "DELETE" , table , where = where ) return execute ( sql , args ) . rowcount
1,998
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/db.py#L55-L59
[ "def", "add_worksheet_progress_percentage", "(", "portal", ")", ":", "add_metadata", "(", "portal", ",", "CATALOG_WORKSHEET_LISTING", ",", "\"getProgressPercentage\"", ")", "logger", ".", "info", "(", "\"Reindexing Worksheets ...\"", ")", "query", "=", "dict", "(", "portal_type", "=", "\"Worksheet\"", ")", "brains", "=", "api", ".", "search", "(", "query", ",", "CATALOG_WORKSHEET_LISTING", ")", "total", "=", "len", "(", "brains", ")", "for", "num", ",", "brain", "in", "enumerate", "(", "brains", ")", ":", "if", "num", "%", "100", "==", "0", ":", "logger", ".", "info", "(", "\"Reindexing open Worksheets: {}/{}\"", ".", "format", "(", "num", ",", "total", ")", ")", "worksheet", "=", "api", ".", "get_object", "(", "brain", ")", "worksheet", ".", "reindexObject", "(", ")" ]
Returns a cursor to the database making new connection if not cached .
def make_cursor ( path , init_statements = ( ) , _connectioncache = { } ) : connection = _connectioncache . get ( path ) if not connection : is_new = not os . path . exists ( path ) or not os . path . getsize ( path ) try : is_new and os . makedirs ( os . path . dirname ( path ) ) except OSError : pass connection = sqlite3 . connect ( path , isolation_level = None , check_same_thread = False , detect_types = sqlite3 . PARSE_DECLTYPES ) for x in init_statements or ( ) : connection . execute ( x ) try : is_new and ":memory:" not in path . lower ( ) and os . chmod ( path , 0707 ) except OSError : pass connection . row_factory = lambda cur , row : dict ( sqlite3 . Row ( cur , row ) ) _connectioncache [ path ] = connection return connection . cursor ( )
1,999
https://github.com/suurjaak/InputScope/blob/245ff045163a1995e8cd5ac558d0a93024eb86eb/inputscope/db.py#L73-L87
[ "def", "remove_bika_listing_resources", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing bika_listing resouces\"", ")", "REMOVE_JS", "=", "[", "\"++resource++bika.lims.js/bika.lims.bikalisting.js\"", ",", "\"++resource++bika.lims.js/bika.lims.bikalistingfilterbar.js\"", ",", "]", "REMOVE_CSS", "=", "[", "\"bika_listing.css\"", ",", "]", "for", "js", "in", "REMOVE_JS", ":", "logger", ".", "info", "(", "\"********** Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")", "for", "css", "in", "REMOVE_CSS", ":", "logger", ".", "info", "(", "\"********** Unregistering CSS %s\"", "%", "css", ")", "portal", ".", "portal_css", ".", "unregisterResource", "(", "css", ")" ]