query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
The name of the project that is currently processed
def projectname ( self ) : if self . _projectname is None : exps = self . config . experiments if self . _experiment is not None and self . _experiment in exps : return exps [ self . _experiment ] [ 'project' ] try : self . _projectname = list ( self . config . projects . keys ( ) ) [ - 1 ] except IndexError : # no project has yet been created ever raise ValueError ( "No experiment has yet been created! Please run setup " "before." ) return self . _projectname
7,800
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L215-L227
[ "def", "GetAttachmentIdFromMediaId", "(", "media_id", ")", ":", "altchars", "=", "'+-'", "if", "not", "six", ".", "PY2", ":", "altchars", "=", "altchars", ".", "encode", "(", "'utf-8'", ")", "# altchars for '+' and '/'. We keep '+' but replace '/' with '-'", "buffer", "=", "base64", ".", "b64decode", "(", "str", "(", "media_id", ")", ",", "altchars", ")", "resoure_id_length", "=", "20", "attachment_id", "=", "''", "if", "len", "(", "buffer", ")", ">", "resoure_id_length", ":", "# We are cutting off the storage index.", "attachment_id", "=", "base64", ".", "b64encode", "(", "buffer", "[", "0", ":", "resoure_id_length", "]", ",", "altchars", ")", "if", "not", "six", ".", "PY2", ":", "attachment_id", "=", "attachment_id", ".", "decode", "(", "'utf-8'", ")", "else", ":", "attachment_id", "=", "media_id", "return", "attachment_id" ]
The identifier or the experiment that is currently processed
def experiment ( self ) : if self . _experiment is None : self . _experiment = list ( self . config . experiments . keys ( ) ) [ - 1 ] return self . _experiment
7,801
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L235-L239
[ "def", "_get_distance_scaling_term", "(", "self", ",", "C", ",", "rval", ",", "mag", ")", ":", "r_adj", "=", "np", ".", "sqrt", "(", "rval", "**", "2.0", "+", "C", "[", "\"h\"", "]", "**", "2.0", ")", "return", "(", "(", "C", "[", "\"c1\"", "]", "+", "C", "[", "\"c2\"", "]", "*", "(", "mag", "-", "self", ".", "CONSTS", "[", "\"Mref\"", "]", ")", ")", "*", "np", ".", "log10", "(", "r_adj", "/", "self", ".", "CONSTS", "[", "\"Rref\"", "]", ")", "-", "(", "C", "[", "\"c3\"", "]", "*", "(", "r_adj", "-", "self", ".", "CONSTS", "[", "\"Rref\"", "]", ")", ")", ")" ]
The main function for parsing global arguments
def app_main ( self , experiment = None , last = False , new = False , verbose = False , verbosity_level = None , no_modification = False , match = False ) : if match : patt = re . compile ( experiment ) matches = list ( filter ( patt . search , self . config . experiments ) ) if len ( matches ) > 1 : raise ValueError ( "Found multiple matches for %s: %s" % ( experiment , matches ) ) elif len ( matches ) == 0 : raise ValueError ( "No experiment matches %s" % experiment ) experiment = matches [ 0 ] if last and self . config . experiments : self . experiment = None elif new and self . config . experiments : try : self . experiment = utils . get_next_name ( self . experiment ) except ValueError : raise ValueError ( "Could not estimate an experiment id! Please use the " "experiment argument to provide an id." ) else : self . _experiment = experiment if verbose : verbose = logging . DEBUG elif verbosity_level : if verbosity_level in [ 'DEBUG' , 'INFO' , 'WARNING' , 'ERROR' ] : verbose = getattr ( logging , verbosity_level ) else : verbose = int ( verbosity_level ) if verbose : logging . getLogger ( utils . get_toplevel_module ( inspect . getmodule ( self ) ) ) . setLevel ( verbose ) self . logger . setLevel ( verbose ) self . no_modification = no_modification
7,802
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L248-L307
[ "def", "_timestamp_regulator", "(", "self", ")", ":", "unified_timestamps", "=", "_PrettyDefaultDict", "(", "list", ")", "staged_files", "=", "self", ".", "_list_audio_files", "(", "sub_dir", "=", "\"staging\"", ")", "for", "timestamp_basename", "in", "self", ".", "__timestamps_unregulated", ":", "if", "len", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ">", "1", ":", "# File has been splitted", "timestamp_name", "=", "''", ".", "join", "(", "timestamp_basename", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "staged_splitted_files_of_timestamp", "=", "list", "(", "filter", "(", "lambda", "staged_file", ":", "(", "timestamp_name", "==", "staged_file", "[", ":", "-", "3", "]", "and", "all", "(", "[", "(", "x", "in", "set", "(", "map", "(", "str", ",", "range", "(", "10", ")", ")", ")", ")", "for", "x", "in", "staged_file", "[", "-", "3", ":", "]", "]", ")", ")", ",", "staged_files", ")", ")", "if", "len", "(", "staged_splitted_files_of_timestamp", ")", "==", "0", ":", "self", ".", "__errors", "[", "(", "time", "(", ")", ",", "timestamp_basename", ")", "]", "=", "{", "\"reason\"", ":", "\"Missing staged file\"", ",", "\"current_staged_files\"", ":", "staged_files", "}", "continue", "staged_splitted_files_of_timestamp", ".", "sort", "(", ")", "unified_timestamp", "=", "list", "(", ")", "for", "staging_digits", ",", "splitted_file", "in", "enumerate", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ":", "prev_splits_sec", "=", "0", "if", "int", "(", "staging_digits", ")", "!=", "0", ":", "prev_splits_sec", "=", "self", ".", "_get_audio_duration_seconds", "(", "\"{}/staging/{}{:03d}\"", ".", "format", "(", "self", ".", "src_dir", ",", "timestamp_name", ",", "staging_digits", "-", "1", ")", ")", "for", "word_block", "in", "splitted_file", ":", "unified_timestamp", ".", "append", "(", "_WordBlock", "(", "word", "=", "word_block", ".", "word", ",", "start", "=", "round", "(", 
"word_block", ".", "start", "+", "prev_splits_sec", ",", "2", ")", ",", "end", "=", "round", "(", "word_block", ".", "end", "+", "prev_splits_sec", ",", "2", ")", ")", ")", "unified_timestamps", "[", "str", "(", "timestamp_basename", ")", "]", "+=", "unified_timestamp", "else", ":", "unified_timestamps", "[", "timestamp_basename", "]", "+=", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", "[", "0", "]", "self", ".", "__timestamps", ".", "update", "(", "unified_timestamps", ")", "self", ".", "__timestamps_unregulated", "=", "_PrettyDefaultDict", "(", "list", ")" ]
Perform the initial setup for the project
def setup ( self , root_dir , projectname = None , link = False , * * kwargs ) : projects = self . config . projects if not projects and projectname is None : projectname = self . name + '0' elif projectname is None : # try to increment a number in the last used try : projectname = utils . get_next_name ( self . projectname ) except ValueError : raise ValueError ( "Could not estimate a project name! Please use the " "projectname argument to provide a project name." ) self . app_main ( * * kwargs ) root_dir = osp . abspath ( osp . join ( root_dir , projectname ) ) projects [ projectname ] = OrderedDict ( [ ( 'name' , projectname ) , ( 'root' , root_dir ) , ( 'timestamps' , OrderedDict ( ) ) ] ) data_dir = self . config . global_config . get ( 'data' , osp . join ( root_dir , 'data' ) ) projects [ projectname ] [ 'data' ] = data_dir self . projectname = projectname self . logger . info ( "Initializing project %s" , projectname ) self . logger . debug ( " Creating root directory %s" , root_dir ) if not osp . exists ( root_dir ) : os . makedirs ( root_dir ) return root_dir
7,803
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L366-L411
[ "def", "_timestamp_regulator", "(", "self", ")", ":", "unified_timestamps", "=", "_PrettyDefaultDict", "(", "list", ")", "staged_files", "=", "self", ".", "_list_audio_files", "(", "sub_dir", "=", "\"staging\"", ")", "for", "timestamp_basename", "in", "self", ".", "__timestamps_unregulated", ":", "if", "len", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ">", "1", ":", "# File has been splitted", "timestamp_name", "=", "''", ".", "join", "(", "timestamp_basename", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "staged_splitted_files_of_timestamp", "=", "list", "(", "filter", "(", "lambda", "staged_file", ":", "(", "timestamp_name", "==", "staged_file", "[", ":", "-", "3", "]", "and", "all", "(", "[", "(", "x", "in", "set", "(", "map", "(", "str", ",", "range", "(", "10", ")", ")", ")", ")", "for", "x", "in", "staged_file", "[", "-", "3", ":", "]", "]", ")", ")", ",", "staged_files", ")", ")", "if", "len", "(", "staged_splitted_files_of_timestamp", ")", "==", "0", ":", "self", ".", "__errors", "[", "(", "time", "(", ")", ",", "timestamp_basename", ")", "]", "=", "{", "\"reason\"", ":", "\"Missing staged file\"", ",", "\"current_staged_files\"", ":", "staged_files", "}", "continue", "staged_splitted_files_of_timestamp", ".", "sort", "(", ")", "unified_timestamp", "=", "list", "(", ")", "for", "staging_digits", ",", "splitted_file", "in", "enumerate", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ":", "prev_splits_sec", "=", "0", "if", "int", "(", "staging_digits", ")", "!=", "0", ":", "prev_splits_sec", "=", "self", ".", "_get_audio_duration_seconds", "(", "\"{}/staging/{}{:03d}\"", ".", "format", "(", "self", ".", "src_dir", ",", "timestamp_name", ",", "staging_digits", "-", "1", ")", ")", "for", "word_block", "in", "splitted_file", ":", "unified_timestamp", ".", "append", "(", "_WordBlock", "(", "word", "=", "word_block", ".", "word", ",", "start", "=", "round", "(", 
"word_block", ".", "start", "+", "prev_splits_sec", ",", "2", ")", ",", "end", "=", "round", "(", "word_block", ".", "end", "+", "prev_splits_sec", ",", "2", ")", ")", ")", "unified_timestamps", "[", "str", "(", "timestamp_basename", ")", "]", "+=", "unified_timestamp", "else", ":", "unified_timestamps", "[", "timestamp_basename", "]", "+=", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", "[", "0", "]", "self", ".", "__timestamps", ".", "update", "(", "unified_timestamps", ")", "self", ".", "__timestamps_unregulated", "=", "_PrettyDefaultDict", "(", "list", ")" ]
Initialize a new experiment
def init ( self , projectname = None , description = None , * * kwargs ) : self . app_main ( * * kwargs ) experiments = self . config . experiments experiment = self . _experiment if experiment is None and not experiments : experiment = self . name + '_exp0' elif experiment is None : try : experiment = utils . get_next_name ( self . experiment ) except ValueError : raise ValueError ( "Could not estimate an experiment id! Please use the " "experiment argument to provide an id." ) self . experiment = experiment if self . is_archived ( experiment ) : raise ValueError ( "The specified experiment has already been archived! Run " "``%s -id %s unarchive`` first" % ( self . name , experiment ) ) if projectname is None : projectname = self . projectname else : self . projectname = projectname self . logger . info ( "Initializing experiment %s of project %s" , experiment , projectname ) exp_dict = experiments . setdefault ( experiment , OrderedDict ( ) ) if description is not None : exp_dict [ 'description' ] = description exp_dict [ 'project' ] = projectname exp_dict [ 'expdir' ] = exp_dir = osp . join ( 'experiments' , experiment ) exp_dir = osp . join ( self . config . projects [ projectname ] [ 'root' ] , exp_dir ) exp_dict [ 'timestamps' ] = OrderedDict ( ) if not os . path . exists ( exp_dir ) : self . logger . debug ( " Creating experiment directory %s" , exp_dir ) os . makedirs ( exp_dir ) self . fix_paths ( exp_dict ) return exp_dict
7,804
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L418-L471
[ "def", "use", "(", "self", ",", "kind", ",", "name", ")", ":", "try", ":", "params", "=", "self", ".", "_parse", "(", "name", ")", "index", "=", "int", "(", "params", "[", "'index'", "]", ",", "10", ")", "if", "index", "in", "self", ".", "_free", "[", "kind", "]", ":", "self", ".", "_free", "[", "kind", "]", ".", "remove", "(", "index", ")", "top", "=", "self", ".", "_top", "[", "kind", "]", "if", "index", ">", "top", ":", "self", ".", "_free", "[", "kind", "]", ".", "update", "(", "range", "(", "top", "+", "1", ",", "index", ")", ")", "self", ".", "_top", "[", "kind", "]", "=", "index", "except", "ValueError", ":", "log", ".", "warning", "(", "\"Cannot extract numerical index\"", "\" from node name `%s`!\"", ",", "name", ")" ]
Get one or more values in the configuration
def get_value ( self , keys , exp_path = False , project_path = False , complete = False , on_projects = False , on_globals = False , projectname = None , no_fix = False , only_keys = False , base = '' , return_list = False , archives = False , * * kwargs ) : def pretty_print ( val ) : if isinstance ( val , dict ) : if only_keys : val = list ( val . keys ( ) ) return ordered_yaml_dump ( val , default_flow_style = False ) . rstrip ( ) return str ( val ) config = self . info ( exp_path = exp_path , project_path = project_path , complete = complete , on_projects = on_projects , on_globals = on_globals , projectname = projectname , no_fix = no_fix , return_dict = True , insert_id = False , archives = archives , * * kwargs ) ret = [ 0 ] * len ( keys ) for i , key in enumerate ( keys ) : if base : key = base + key key , sub_config = utils . go_through_dict ( key , config ) ret [ i ] = sub_config [ key ] if return_list : return ret return ( self . print_ or six . print_ ) ( '\n' . join ( map ( pretty_print , ret ) ) )
7,805
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1159-L1201
[ "def", "wnreld", "(", "a", ",", "op", ",", "b", ")", ":", "assert", "isinstance", "(", "a", ",", "stypes", ".", "SpiceCell", ")", "assert", "b", ".", "dtype", "==", "1", "assert", "isinstance", "(", "b", ",", "stypes", ".", "SpiceCell", ")", "assert", "a", ".", "dtype", "==", "1", "assert", "isinstance", "(", "op", ",", "str", ")", "op", "=", "stypes", ".", "stringToCharP", "(", "op", ".", "encode", "(", "encoding", "=", "'UTF-8'", ")", ")", "return", "bool", "(", "libspice", ".", "wnreld_c", "(", "ctypes", ".", "byref", "(", "a", ")", ",", "op", ",", "ctypes", ".", "byref", "(", "b", ")", ")", ")" ]
Delete a value in the configuration
def del_value ( self , keys , complete = False , on_projects = False , on_globals = False , projectname = None , base = '' , dtype = None , * * kwargs ) : config = self . info ( complete = complete , on_projects = on_projects , on_globals = on_globals , projectname = projectname , return_dict = True , insert_id = False , * * kwargs ) for key in keys : if base : key = base + key key , sub_config = utils . go_through_dict ( key , config ) del sub_config [ key ]
7,806
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1219-L1241
[ "def", "key_regen", "(", ")", ":", "client", "=", "salt", ".", "client", ".", "get_local_client", "(", "__opts__", "[", "'conf_file'", "]", ")", "try", ":", "client", ".", "cmd", "(", "'*'", ",", "'saltutil.regen_keys'", ")", "except", "SaltClientError", "as", "client_error", ":", "print", "(", "client_error", ")", "return", "False", "for", "root", ",", "_", ",", "files", "in", "salt", ".", "utils", ".", "path", ".", "os_walk", "(", "__opts__", "[", "'pki_dir'", "]", ")", ":", "for", "fn_", "in", "files", ":", "path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "fn_", ")", "try", ":", "os", ".", "remove", "(", "path", ")", "except", "os", ".", "error", ":", "pass", "msg", "=", "(", "'The minion and master keys have been deleted. Restart the Salt\\n'", "'Master within the next 60 seconds!!!\\n\\n'", "'Wait for the minions to reconnect. Once the minions reconnect\\n'", "'the new keys will appear in pending and will need to be re-\\n'", "'accepted by running:\\n'", "' salt-key -A\\n\\n'", "'Be advised that minions not currently connected to the master\\n'", "'will not be able to reconnect and may require manual\\n'", "'regeneration via a local call to\\n'", "' salt-call saltutil.regen_keys'", ")", "return", "msg" ]
Configure the project and experiments
def configure ( self , global_config = False , project_config = False , ifile = None , forcing = None , serial = False , nprocs = None , update_from = None , * * kwargs ) : if global_config : d = self . config . global_config elif project_config : self . app_main ( * * kwargs ) d = self . config . projects [ self . projectname ] else : d = self . config . experiments [ self . experiment ] if ifile is not None : d [ 'input' ] = osp . abspath ( ifile ) if forcing is not None : d [ 'forcing' ] = osp . abspath ( forcing ) if update_from is not None : with open ( 'update_from' ) as f : d . update ( yaml . load ( f ) ) global_config = self . config . global_config if serial : global_config [ 'serial' ] = True elif nprocs : nprocs = int ( nprocs ) if nprocs != 'all' else nprocs global_config [ 'serial' ] = False global_config [ 'nprocs' ] = nprocs
7,807
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1259-L1317
[ "def", "unload", "(", "self", ")", ":", "unloaded", "=", "False", "if", "self", ".", "_lib", "is", "not", "None", ":", "if", "self", ".", "_winlib", "is", "not", "None", ":", "# ctypes passes integers as 32-bit C integer types, which will", "# truncate the value of a 64-bit pointer in 64-bit python, so", "# we have to change the FreeLibrary method to take a pointer", "# instead of an integer handle.", "ctypes", ".", "windll", ".", "kernel32", ".", "FreeLibrary", ".", "argtypes", "=", "(", "ctypes", ".", "c_void_p", ",", ")", "# On Windows we must free both loaded libraries before the", "# temporary file can be cleaned up.", "ctypes", ".", "windll", ".", "kernel32", ".", "FreeLibrary", "(", "self", ".", "_lib", ".", "_handle", ")", "ctypes", ".", "windll", ".", "kernel32", ".", "FreeLibrary", "(", "self", ".", "_winlib", ".", "_handle", ")", "self", ".", "_lib", "=", "None", "self", ".", "_winlib", "=", "None", "unloaded", "=", "True", "else", ":", "# On OSX and Linux, just release the library; it's not safe", "# to close a dll that ctypes is using.", "del", "self", ".", "_lib", "self", ".", "_lib", "=", "None", "unloaded", "=", "True", "if", "self", ".", "_temp", "is", "not", "None", ":", "os", ".", "remove", "(", "self", ".", "_temp", ".", "name", ")", "self", ".", "_temp", "=", "None", "return", "unloaded" ]
Set a value in the configuration
def set_value ( self , items , complete = False , on_projects = False , on_globals = False , projectname = None , base = '' , dtype = None , * * kwargs ) : def identity ( val ) : return val config = self . info ( complete = complete , on_projects = on_projects , on_globals = on_globals , projectname = projectname , return_dict = True , insert_id = False , * * kwargs ) if isinstance ( dtype , six . string_types ) : dtype = getattr ( builtins , dtype ) elif dtype is None : dtype = identity for key , value in six . iteritems ( dict ( items ) ) : if base : key = base + key key , sub_config = utils . go_through_dict ( key , config , setdefault = OrderedDict ) if key in self . paths : if isinstance ( value , six . string_types ) : value = osp . abspath ( value ) else : value = list ( map ( osp . abspath , value ) ) sub_config [ key ] = dtype ( value )
7,808
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1331-L1368
[ "def", "key_regen", "(", ")", ":", "client", "=", "salt", ".", "client", ".", "get_local_client", "(", "__opts__", "[", "'conf_file'", "]", ")", "try", ":", "client", ".", "cmd", "(", "'*'", ",", "'saltutil.regen_keys'", ")", "except", "SaltClientError", "as", "client_error", ":", "print", "(", "client_error", ")", "return", "False", "for", "root", ",", "_", ",", "files", "in", "salt", ".", "utils", ".", "path", ".", "os_walk", "(", "__opts__", "[", "'pki_dir'", "]", ")", ":", "for", "fn_", "in", "files", ":", "path", "=", "os", ".", "path", ".", "join", "(", "root", ",", "fn_", ")", "try", ":", "os", ".", "remove", "(", "path", ")", "except", "os", ".", "error", ":", "pass", "msg", "=", "(", "'The minion and master keys have been deleted. Restart the Salt\\n'", "'Master within the next 60 seconds!!!\\n\\n'", "'Wait for the minions to reconnect. Once the minions reconnect\\n'", "'the new keys will appear in pending and will need to be re-\\n'", "'accepted by running:\\n'", "' salt-key -A\\n\\n'", "'Be advised that minions not currently connected to the master\\n'", "'will not be able to reconnect and may require manual\\n'", "'regeneration via a local call to\\n'", "' salt-call saltutil.regen_keys'", ")", "return", "msg" ]
Fix the paths in the given dictionary to get relative paths
def rel_paths ( self , * args , * * kwargs ) : return self . config . experiments . rel_paths ( * args , * * kwargs )
7,809
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1410-L1425
[ "def", "checkIfAvailable", "(", "self", ",", "dateTime", "=", "timezone", ".", "now", "(", ")", ")", ":", "return", "(", "self", ".", "startTime", ">=", "dateTime", "+", "timedelta", "(", "days", "=", "getConstant", "(", "'privateLessons__closeBookingDays'", ")", ")", "and", "self", ".", "startTime", "<=", "dateTime", "+", "timedelta", "(", "days", "=", "getConstant", "(", "'privateLessons__openBookingDays'", ")", ")", "and", "not", "self", ".", "eventRegistration", "and", "(", "self", ".", "status", "==", "self", ".", "SlotStatus", ".", "available", "or", "(", "self", ".", "status", "==", "self", ".", "SlotStatus", ".", "tentative", "and", "getattr", "(", "getattr", "(", "self", ".", "temporaryEventRegistration", ",", "'registration'", ",", "None", ")", ",", "'expirationDate'", ",", "timezone", ".", "now", "(", ")", ")", "<=", "timezone", ".", "now", "(", ")", ")", ")", ")" ]
Returns the path from the current working directory
def abspath ( self , path , project = None , root = None ) : if root is None : root = self . config . projects [ project or self . projectname ] [ 'root' ] return osp . join ( root , path )
7,810
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1440-L1463
[ "def", "updateSeriesRegistrationStatus", "(", ")", ":", "from", ".", "models", "import", "Series", "if", "not", "getConstant", "(", "'general__enableCronTasks'", ")", ":", "return", "logger", ".", "info", "(", "'Checking status of Series that are open for registration.'", ")", "open_series", "=", "Series", ".", "objects", ".", "filter", "(", ")", ".", "filter", "(", "*", "*", "{", "'registrationOpen'", ":", "True", "}", ")", "for", "series", "in", "open_series", ":", "series", ".", "updateRegistrationStatus", "(", ")" ]
Returns the relative path from the root directory of the project
def relpath ( self , path , project = None , root = None ) : if root is None : root = self . config . projects [ project or self . projectname ] [ 'root' ] return osp . relpath ( path , root )
7,811
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1465-L1488
[ "def", "EnableEditingOnService", "(", "self", ",", "url", ",", "definition", "=", "None", ")", ":", "adminFS", "=", "AdminFeatureService", "(", "url", "=", "url", ",", "securityHandler", "=", "self", ".", "_securityHandler", ")", "if", "definition", "is", "None", ":", "definition", "=", "collections", ".", "OrderedDict", "(", ")", "definition", "[", "'hasStaticData'", "]", "=", "False", "definition", "[", "'allowGeometryUpdates'", "]", "=", "True", "definition", "[", "'editorTrackingInfo'", "]", "=", "{", "}", "definition", "[", "'editorTrackingInfo'", "]", "[", "'enableEditorTracking'", "]", "=", "False", "definition", "[", "'editorTrackingInfo'", "]", "[", "'enableOwnershipAccessControl'", "]", "=", "False", "definition", "[", "'editorTrackingInfo'", "]", "[", "'allowOthersToUpdate'", "]", "=", "True", "definition", "[", "'editorTrackingInfo'", "]", "[", "'allowOthersToDelete'", "]", "=", "True", "definition", "[", "'capabilities'", "]", "=", "\"Query,Editing,Create,Update,Delete\"", "existingDef", "=", "{", "}", "existingDef", "[", "'capabilities'", "]", "=", "adminFS", ".", "capabilities", "existingDef", "[", "'allowGeometryUpdates'", "]", "=", "adminFS", ".", "allowGeometryUpdates", "enableResults", "=", "adminFS", ".", "updateDefinition", "(", "json_dict", "=", "definition", ")", "if", "'error'", "in", "enableResults", ":", "return", "enableResults", "[", "'error'", "]", "adminFS", "=", "None", "del", "adminFS", "print", "(", "enableResults", ")", "return", "existingDef" ]
Create the argument parser for this instance
def setup_parser ( self , parser = None , subparsers = None ) : commands = self . commands [ : ] parser_cmds = self . parser_commands . copy ( ) if subparsers is None : if parser is None : parser = FuncArgParser ( self . name ) subparsers = parser . add_subparsers ( chain = True ) ret = { } for i , cmd in enumerate ( commands [ : ] ) : func = getattr ( self , cmd ) parser_cmd = parser_cmds . setdefault ( cmd , cmd . replace ( '_' , '-' ) ) ret [ cmd ] = sp = parser . setup_subparser ( func , name = parser_cmd , return_parser = True ) sp . setup_args ( func ) modifier = getattr ( self , '_modify_' + cmd , None ) if modifier is not None : modifier ( sp ) self . parser_commands = parser_cmds parser . setup_args ( self . app_main ) self . _modify_app_main ( parser ) self . parser = parser self . subparsers = ret return parser , subparsers , ret
7,812
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1495-L1552
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Function returning the command line parser for this class
def get_parser ( cls ) : organizer = cls ( ) organizer . setup_parser ( ) organizer . _finish_parser ( ) return organizer . parser
7,813
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1584-L1589
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Convenience function to determine whether the given experiment has been archived already
def is_archived ( self , experiment , ignore_missing = True ) : if ignore_missing : if isinstance ( self . config . experiments . get ( experiment , True ) , Archive ) : return self . config . experiments . get ( experiment , True ) else : if isinstance ( self . config . experiments [ experiment ] , Archive ) : return self . config . experiments [ experiment ]
7,814
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1595-L1616
[ "def", "make_app", "(", "*", "args", ",", "*", "*", "kw", ")", ":", "default_options", "=", "[", "[", "'content_path'", ",", "'.'", "]", ",", "[", "'uri_marker'", ",", "''", "]", "]", "args", "=", "list", "(", "args", ")", "options", "=", "dict", "(", "default_options", ")", "options", ".", "update", "(", "kw", ")", "while", "default_options", "and", "args", ":", "_d", "=", "default_options", ".", "pop", "(", "0", ")", "_a", "=", "args", ".", "pop", "(", "0", ")", "options", "[", "_d", "[", "0", "]", "]", "=", "_a", "options", "[", "'content_path'", "]", "=", "os", ".", "path", ".", "abspath", "(", "options", "[", "'content_path'", "]", ".", "decode", "(", "'utf8'", ")", ")", "options", "[", "'uri_marker'", "]", "=", "options", "[", "'uri_marker'", "]", ".", "decode", "(", "'utf8'", ")", "selector", "=", "WSGIHandlerSelector", "(", ")", "git_inforefs_handler", "=", "GitHTTPBackendInfoRefs", "(", "*", "*", "options", ")", "git_rpc_handler", "=", "GitHTTPBackendSmartHTTP", "(", "*", "*", "options", ")", "static_handler", "=", "StaticServer", "(", "*", "*", "options", ")", "file_handler", "=", "FileServer", "(", "*", "*", "options", ")", "json_handler", "=", "JSONServer", "(", "*", "*", "options", ")", "ui_handler", "=", "UIServer", "(", "*", "*", "options", ")", "if", "options", "[", "'uri_marker'", "]", ":", "marker_regex", "=", "r'(?P<decorative_path>.*?)(?:/'", "+", "options", "[", "'uri_marker'", "]", "+", "')'", "else", ":", "marker_regex", "=", "''", "selector", ".", "add", "(", "marker_regex", "+", "r'(?P<working_path>.*?)/info/refs\\?.*?service=(?P<git_command>git-[^&]+).*$'", ",", "GET", "=", "git_inforefs_handler", ",", "HEAD", "=", "git_inforefs_handler", ")", "selector", ".", "add", "(", "marker_regex", "+", "r'(?P<working_path>.*)/(?P<git_command>git-[^/]+)$'", ",", "POST", "=", "git_rpc_handler", ")", "selector", ".", "add", "(", "marker_regex", "+", "r'/static/(?P<working_path>.*)$'", ",", "GET", "=", "static_handler", ",", "HEAD", "=", 
"static_handler", ")", "selector", ".", "add", "(", "marker_regex", "+", "r'(?P<working_path>.*)/file$'", ",", "GET", "=", "file_handler", ",", "HEAD", "=", "file_handler", ")", "selector", ".", "add", "(", "marker_regex", "+", "r'(?P<working_path>.*)$'", ",", "GET", "=", "ui_handler", ",", "POST", "=", "json_handler", ",", "HEAD", "=", "ui_handler", ")", "return", "selector" ]
Create translations from file extension to archive format
def _archive_extensions ( ) : if six . PY3 : ext_map = { } fmt_map = { } for key , exts , desc in shutil . get_unpack_formats ( ) : fmt_map [ key ] = exts [ 0 ] for ext in exts : ext_map [ ext ] = key else : ext_map = { '.tar' : 'tar' , '.tar.bz2' : 'bztar' , '.tar.gz' : 'gztar' , '.tar.xz' : 'xztar' , '.tbz2' : 'bztar' , '.tgz' : 'gztar' , '.txz' : 'xztar' , '.zip' : 'zip' } fmt_map = { 'bztar' : '.tar.bz2' , 'gztar' : '.tar.gz' , 'tar' : '.tar' , 'xztar' : '.tar.xz' , 'zip' : '.zip' } return ext_map , fmt_map
7,815
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/__init__.py#L1619-L1650
[ "def", "connection", "(", "self", ")", ":", "if", "self", ".", "_connections", ":", "if", "not", "self", ".", "_connections", ".", "acquire", "(", "self", ".", "_blocking", ")", ":", "raise", "TooManyConnections", "try", ":", "con", "=", "self", ".", "_cache", ".", "get", "(", "0", ")", "except", "Empty", ":", "con", "=", "self", ".", "steady_connection", "(", ")", "return", "PooledPgConnection", "(", "self", ",", "con", ")" ]
Load and display the PDF file specified by fileName .
def loadFile(self, fileName):
    """Load and display the PDF file specified by ``fileName``.

    If the file does not exist, the problem is logged, ``self.fileName``
    is reset to ``None``, and nothing is displayed.
    """
    # Abort early when the file cannot be found.
    if not QtCore.QFile(fileName).exists():
        msg = "File <b>{}</b> does not exist".format(self.qteAppletID())
        self.qteLogger.info(msg)
        self.fileName = None
        return

    # Remember the file name and parse the PDF with the Poppler library.
    self.fileName = fileName
    document = popplerqt4.Poppler.Document.load(fileName)

    # Antialiasing makes the rendered fonts considerably more readable.
    document.setRenderHint(popplerqt4.Poppler.Document.Antialiasing)
    document.setRenderHint(popplerqt4.Poppler.Document.TextAntialiasing)

    # Render every page into an image, show each image in a QLabel, and
    # stack the labels vertically.
    page_layout = QtGui.QVBoxLayout()
    for page_num in range(document.numPages()):
        page_image = document.page(page_num).renderToImage()
        page_label = self.qteAddWidget(QtGui.QLabel())
        page_label.setPixmap(QtGui.QPixmap.fromImage(page_image))
        page_layout.addWidget(page_label)

    # QScrollArea can only display a single widget at once, so wrap the
    # layout in an auxiliary container widget first.
    container = self.qteAddWidget(QtGui.QWidget(self))
    container.setLayout(page_layout)
    self.qteScroll.setWidget(container)
7,816
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/applets/pdf_reader.py#L82-L119
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Return a Product object by ID .
def get_product(membersuite_id, client=None):
    """Return a Product object by ID.

    Returns ``None`` for a falsy ``membersuite_id``. A new client is
    created on demand when none is supplied. Raises ``ExecuteMSQLError``
    if the MSQL query does not succeed.
    """
    if not membersuite_id:
        return None
    if not client:
        client = get_new_client(request_session=True)
    object_query = ("SELECT Object() FROM PRODUCT WHERE ID = '{}'"
                    .format(membersuite_id))
    result = client.execute_object_query(object_query)
    msql_result = result["body"]["ExecuteMSQLResult"]
    # Fail loudly on an unsuccessful query before touching the payload.
    if not msql_result["Success"]:
        raise ExecuteMSQLError(result=result)
    membersuite_object_data = msql_result["ResultValue"]["SingleObject"]
    return Product(membersuite_object_data=membersuite_object_data)
7,817
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/financial/services.py#L6-L27
[ "def", "load_toml_rest_api_config", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "LOGGER", ".", "info", "(", "\"Skipping rest api loading from non-existent config file: %s\"", ",", "filename", ")", "return", "RestApiConfig", "(", ")", "LOGGER", ".", "info", "(", "\"Loading rest api information from config: %s\"", ",", "filename", ")", "try", ":", "with", "open", "(", "filename", ")", "as", "fd", ":", "raw_config", "=", "fd", ".", "read", "(", ")", "except", "IOError", "as", "e", ":", "raise", "RestApiConfigurationError", "(", "\"Unable to load rest api configuration file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "toml_config", "=", "toml", ".", "loads", "(", "raw_config", ")", "invalid_keys", "=", "set", "(", "toml_config", ".", "keys", "(", ")", ")", ".", "difference", "(", "[", "'bind'", ",", "'connect'", ",", "'timeout'", ",", "'opentsdb_db'", ",", "'opentsdb_url'", ",", "'opentsdb_username'", ",", "'opentsdb_password'", ",", "'client_max_size'", "]", ")", "if", "invalid_keys", ":", "raise", "RestApiConfigurationError", "(", "\"Invalid keys in rest api config: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted", "(", "list", "(", "invalid_keys", ")", ")", ")", ")", ")", "config", "=", "RestApiConfig", "(", "bind", "=", "toml_config", ".", "get", "(", "\"bind\"", ",", "None", ")", ",", "connect", "=", "toml_config", ".", "get", "(", "'connect'", ",", "None", ")", ",", "timeout", "=", "toml_config", ".", "get", "(", "'timeout'", ",", "None", ")", ",", "opentsdb_url", "=", "toml_config", ".", "get", "(", "'opentsdb_url'", ",", "None", ")", ",", "opentsdb_db", "=", "toml_config", ".", "get", "(", "'opentsdb_db'", ",", "None", ")", ",", "opentsdb_username", "=", "toml_config", ".", "get", "(", "'opentsdb_username'", ",", "None", ")", ",", "opentsdb_password", "=", "toml_config", ".", "get", "(", "'opentsdb_password'", ",", "None", ")", ",", "client_max_size", "=", "toml_config", 
".", "get", "(", "'client_max_size'", ",", "None", ")", ")", "return", "config" ]
Generate callback for a queue
def __watchers_callbacks_exec(self, signal_name):
    """Generate a callback that notifies every watcher of ``signal_name``.

    The returned zero-argument function iterates the watchers registered
    for the given signal and calls ``notify()`` on each one, skipping
    cleared (``None``) entries.
    """
    def notify_watchers():
        # Entries may be None after a watcher is removed; skip those.
        for entry in self.__watchers_callbacks[signal_name]:
            if entry is not None:
                entry.notify()
    return notify_watchers
7,818
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/signals/signals.py#L126-L138
[ "def", "fingerprint", "(", "self", ",", "option_type", ",", "option_val", ")", ":", "if", "option_val", "is", "None", ":", "return", "None", "# Wrapping all other values in a list here allows us to easily handle single-valued and", "# list-valued options uniformly. For non-list-valued options, this will be a singleton list", "# (with the exception of dict, which is not modified). This dict exception works because we do", "# not currently have any \"list of dict\" type, so there is no ambiguity.", "if", "not", "isinstance", "(", "option_val", ",", "(", "list", ",", "tuple", ",", "dict", ")", ")", ":", "option_val", "=", "[", "option_val", "]", "if", "option_type", "==", "target_option", ":", "return", "self", ".", "_fingerprint_target_specs", "(", "option_val", ")", "elif", "option_type", "==", "dir_option", ":", "return", "self", ".", "_fingerprint_dirs", "(", "option_val", ")", "elif", "option_type", "==", "file_option", ":", "return", "self", ".", "_fingerprint_files", "(", "option_val", ")", "elif", "option_type", "==", "dict_with_files_option", ":", "return", "self", ".", "_fingerprint_dict_with_files", "(", "option_val", ")", "else", ":", "return", "self", ".", "_fingerprint_primitives", "(", "option_val", ")" ]
Trim a python doc string .
def py_doc_trim(docstring):
    """Trim a python doc string.

    Follows the PEP 257 trimming algorithm (strip common leading
    indentation and surrounding blank lines), then replaces single
    newlines with spaces via the module-level
    ``newline_substitution_regex``.

    Returns an empty string for a falsy ``docstring``.
    """
    if not docstring:
        return ''
    # Convert tabs to spaces (following the normal Python rules)
    # and split into a list of lines:
    lines = docstring.expandtabs().splitlines()
    # Determine minimum indentation (first line doesn't count).
    # sys.maxsize works on both Python 2 and 3; sys.maxint is 2-only.
    indent = sys.maxsize
    for line in lines[1:]:
        stripped = line.lstrip()
        if stripped:
            indent = min(indent, len(line) - len(stripped))
    # Remove indentation (first line is special):
    trimmed = [lines[0].strip()]
    if indent < sys.maxsize:
        for line in lines[1:]:
            trimmed.append(line[indent:].rstrip())
    # Strip off trailing and leading blank lines:
    while trimmed and not trimmed[-1]:
        trimmed.pop()
    while trimmed and not trimmed[0]:
        trimmed.pop(0)
    # The single string returned by the original PEP 257 algorithm.
    joined = '\n'.join(trimmed)
    # Return a version that replaces single newlines with spaces.
    return newline_substitution_regex.sub(" ", joined)
7,819
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/utils.py#L11-L44
[ "def", "_mmUpdateDutyCycles", "(", "self", ")", ":", "period", "=", "self", ".", "getDutyCyclePeriod", "(", ")", "unionSDRArray", "=", "numpy", ".", "zeros", "(", "self", ".", "getNumColumns", "(", ")", ")", "unionSDRArray", "[", "list", "(", "self", ".", "_mmTraces", "[", "\"unionSDR\"", "]", ".", "data", "[", "-", "1", "]", ")", "]", "=", "1", "self", ".", "_mmData", "[", "\"unionSDRDutyCycle\"", "]", "=", "UnionTemporalPoolerMonitorMixin", ".", "_mmUpdateDutyCyclesHelper", "(", "self", ".", "_mmData", "[", "\"unionSDRDutyCycle\"", "]", ",", "unionSDRArray", ",", "period", ")", "self", ".", "_mmData", "[", "\"persistenceDutyCycle\"", "]", "=", "UnionTemporalPoolerMonitorMixin", ".", "_mmUpdateDutyCyclesHelper", "(", "self", ".", "_mmData", "[", "\"persistenceDutyCycle\"", "]", ",", "self", ".", "_poolingActivation", ",", "period", ")" ]
Add names to all of the Resource fields .
def _fix_up_fields(cls):
    """Add names to all of the Resource fields.

    Scans the class for ``BaseField`` attributes, wires each field up to
    the class, and records them in ``cls._fields``. Underscore-prefixed
    field names are rejected. Populates ``cls._default_fields`` when it
    has not been set explicitly.
    """
    cls._fields = {}
    # Skip the base classes defined in this module (DebugResource excepted).
    if cls.__module__ == __name__ and cls.__name__ != 'DebugResource':
        return
    for attr_name in set(dir(cls)):
        candidate = getattr(cls, attr_name, None)
        if not isinstance(candidate, BaseField):
            continue
        if attr_name.startswith('_'):
            raise TypeError("Resource field %s cannot begin with an "
                            "underscore. Underscore attributes are reserved "
                            "for instance variables that aren't intended to "
                            "propagate out to the HTTP caller." % attr_name)
        candidate._fix_up(cls, attr_name)
        cls._fields[candidate.name] = candidate
    if cls._default_fields is None:
        cls._default_fields = tuple(cls._fields.keys())
7,820
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/resource.py#L19-L42
[ "def", "cublasGetVersion", "(", "handle", ")", ":", "version", "=", "ctypes", ".", "c_int", "(", ")", "status", "=", "_libcublas", ".", "cublasGetVersion_v2", "(", "handle", ",", "ctypes", ".", "byref", "(", "version", ")", ")", "cublasCheckStatus", "(", "status", ")", "return", "version", ".", "value" ]
Renders a JSON - serializable version of the object passed in . Usually this means turning a Python object into a dict but sometimes it might make sense to render a list or a string or a tuple .
def _render_serializable(self, obj, context):
    """Render a JSON-serializable version of ``obj``.

    Usually this produces a dict keyed by the fields listed in
    ``self._fields_to_render``, each rendered by its field's ``render``
    callable. Returns ``None`` when ``obj`` is ``None``, and an empty
    dict when there are no fields to render.
    """
    logging.info("""Careful, you're calling ._render_serializable on the base resource, which is probably not what you actually want to be doing!""")
    if obj is None:
        logging.debug("_render_serializable passed a None obj, returning None")
        return None
    rendered = {}
    if self._fields_to_render is None:
        return rendered
    for field_name in self._fields_to_render:
        render_fn = self._fields[field_name].render
        rendered[field_name] = render_fn(obj, field_name, context)
    return rendered
7,821
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/resource.py#L59-L89
[ "def", "simplify", "(", "self", ")", ":", "expr", "=", "self", ".", "expr", "self", ".", "expr", "=", "_cancel_mul", "(", "expr", ",", "self", ".", "registry", ")", "return", "self" ]
Iterates through the passed in list_of_objs and calls the _render_serializable method of each object s Resource type .
def _render_serializable(self, list_of_objs, context):
    """Render each object in ``list_of_objs`` via the item resource.

    ``None`` entries are skipped; every other entry is passed through the
    ``_render_serializable`` method of ``self._item_resource`` and the
    results are collected into a list.
    """
    return [
        self._item_resource._render_serializable(member, context)
        for member in list_of_objs
        if member is not None
    ]
7,822
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/resource.py#L116-L132
[ "def", "dst", "(", "self", ",", "dt", ")", ":", "if", "not", "self", ".", "_is_dst", "(", "dt", ")", ":", "return", "datetime", ".", "timedelta", "(", "0", ")", "offset", "=", "time", ".", "timezone", "-", "time", ".", "altzone", "return", "datetime", ".", "timedelta", "(", "seconds", "=", "-", "offset", ")" ]
Protect a function with a lock that was get from the specified function . If a lock can not be acquire then no function call will be made
def critical_section_dynamic_lock(lock_fn, blocking=True, timeout=None, raise_exception=True):
    """Protect a function with a lock obtained from ``lock_fn``.

    ``lock_fn`` is called with the decorated function's arguments and must
    return a lock object. If the lock cannot be acquired, the wrapped
    function is not called; a ``WCriticalSectionError`` is raised unless
    ``raise_exception`` is False, in which case ``None`` is returned.
    """
    # acquire() interprets -1 as "wait forever / no timeout".
    if blocking is False or timeout is None:
        timeout = -1

    def first_level_decorator(decorated_function):
        def second_level_decorator(original_function, *args, **kwargs):
            guard = lock_fn(*args, **kwargs)
            acquired = guard.acquire(blocking=blocking, timeout=timeout)
            if acquired is not True:
                if raise_exception is True:
                    raise WCriticalSectionError('Unable to lock critical section\n')
                return None
            try:
                return original_function(*args, **kwargs)
            finally:
                guard.release()
        return decorator(second_level_decorator)(decorated_function)
    return first_level_decorator
7,823
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/thread.py#L42-L70
[ "def", "register", "(", "cls", ",", "config_type", ":", "Type", "[", "DecoderConfig", "]", ",", "suffix", ":", "str", ")", ":", "def", "wrapper", "(", "target_cls", ")", ":", "cls", ".", "__registry", "[", "config_type", "]", "=", "(", "target_cls", ",", "suffix", ")", "return", "target_cls", "return", "wrapper" ]
Return a JSON string format of a Pale module s documentation .
def generate_json_docs(module, pretty_print=False, user=None):
    """Return a JSON string format of a Pale module's documentation.

    When ``pretty_print`` is True the output is indented and readable;
    otherwise it is emitted in the most compact form. ``user`` is passed
    through to ``generate_doc_dict`` for permission filtering.
    """
    if pretty_print:
        indent = 4
        separators = (',', ': ')
    else:
        indent = None
        separators = (',', ':')
    module_doc_dict = generate_doc_dict(module, user)
    return json.dumps(module_doc_dict, indent=indent, separators=separators)
7,824
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L81-L104
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
Return a RAML file of a Pale module s documentation as a string .
def generate_raml_docs ( module , fields , shared_types , user = None , title = "My API" , version = "v1" , api_root = "api" , base_uri = "http://mysite.com/{version}" ) : output = StringIO ( ) # Add the RAML header info output . write ( '#%RAML 1.0 \n' ) output . write ( 'title: ' + title + ' \n' ) output . write ( 'baseUri: ' + base_uri + ' \n' ) output . write ( 'version: ' + version + '\n' ) output . write ( 'mediaType: application/json\n\n' ) output . write ( 'documentation:\n' ) output . write ( ' - title: Welcome\n' ) output . write ( ' content: |\n' ) output . write ( """\ Welcome to the Loudr API Docs.\n You'll find comprehensive documentation on our endpoints and resources here. """ ) output . write ( "\n###############\n# Resource Types:\n###############\n\n" ) output . write ( 'types:\n' ) basic_fields = [ ] for field_module in inspect . getmembers ( fields , inspect . ismodule ) : for field_class in inspect . getmembers ( field_module [ 1 ] , inspect . isclass ) : basic_fields . append ( field_class [ 1 ] ) pale_basic_types = generate_basic_type_docs ( basic_fields , { } ) output . write ( "\n# Pale Basic Types:\n\n" ) output . write ( pale_basic_types [ 0 ] ) shared_fields = [ ] for shared_type in shared_types : for field_class in inspect . getmembers ( shared_type , inspect . isclass ) : shared_fields . append ( field_class [ 1 ] ) pale_shared_types = generate_basic_type_docs ( shared_fields , pale_basic_types [ 1 ] ) output . write ( "\n# Pale Shared Types:\n\n" ) output . write ( pale_shared_types [ 0 ] ) raml_resource_types = generate_raml_resource_types ( module ) output . write ( "\n# API Resource Types:\n\n" ) output . write ( raml_resource_types ) raml_resources = generate_raml_resources ( module , api_root , user ) output . write ( "\n\n###############\n# API Endpoints:\n###############\n\n" ) output . write ( raml_resources ) raml_docs = output . getvalue ( ) output . close ( ) return raml_docs
7,825
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L107-L169
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
Map resource types to their RAML equivalents . Expects fields to be a list of modules - each module would be something like pale . fields . Expects existing_types to be a list of dict of existing types which will take precedence and prevent a new type with the same name from being added .
def generate_basic_type_docs(fields, existing_types):
    """Map resource types to their RAML equivalents.

    Expects ``fields`` to be a list of field classes (e.g. gathered from
    the ``pale.fields`` modules). Expects ``existing_types`` to be a dict
    of already-documented types, which take precedence and prevent a new
    type with the same name from being added.

    Returns a tuple ``(basic_docs, basic_types)``: the rendered RAML text
    and the dict of type definitions that were produced.
    """
    # These types are standard in RAML 1.0.
    # They should not be defined in the RAML file that we return;
    # we only inherit from them in the types we define below.
    raml_built_in_types = {"any": {"parent": None, },
                           "time-only": {"parent": "any", },
                           "datetime": {"parent": "any",
                                        "pale_children": ["timestamp"], },
                           "datetime-only": {"parent": "any", },
                           "date-only": {"parent": "any",
                                         "pale_children": ["date"], },
                           "number": {"parent": "any", },
                           "boolean": {"parent": "any",
                                       "pale_children": ["boolean"]},
                           "string": {"parent": "any",
                                      "pale_children": ["url", "string", "uri"], },
                           "null": {"parent": "any", },
                           "file": {"parent": "any", },
                           "array": {"parent": "any",
                                     "pale_children": ["list"], },
                           "object": {"parent": "any", },
                           "union": {"parent": "any", },
                           "XSD Schema": {"parent": "any", },
                           "JSON Schema": {"parent": "any", },
                           "integer": {"parent": "number",
                                       "pale_children": ["integer"], }, }
    basic_types = {}
    # Find all classes defined in the passed-in fields and build up an
    # object with the relevant details of the basic types.
    for field in fields:
        # If this is a Pale type, it will have a 'value_type' property.
        if hasattr(field, "value_type"):
            type_name = field.value_type.replace(" ", "_")
            # Add this type only if it is not a built-in RAML type and has
            # not been added yet (here or by the caller).
            if type_name not in raml_built_in_types and type_name not in basic_types and type_name not in existing_types:
                basic_types[type_name] = {}
                # Strip newlines/leading whitespace from the doc string and
                # use it as the description.
                if hasattr(field, "__doc__"):
                    modified_description = clean_description(field.__doc__)
                    basic_types[type_name]["description"] = modified_description
                # If this type is listed as the child of a built-in RAML
                # type, use that RAML type as its parent type.
                for raml_type in raml_built_in_types:
                    if "pale_children" in raml_built_in_types[raml_type]:
                        if type_name in raml_built_in_types[raml_type]["pale_children"]:
                            basic_types[type_name]["type"] = raml_type
                            break
                else:
                    # Not the child of a built-in RAML type. If this type
                    # is a list composed of other items:
                    if hasattr(field, "is_list") and field.is_list:
                        basic_types[type_name]["type"] = "array"
                        # Use the declared item type when present,
                        # otherwise fall back to "base".
                        if hasattr(field, "list_item_type") and field.list_item_type != None:
                            basic_types[type_name]["items"] = field.list_item_type
                        else:
                            basic_types[type_name]["items"] = "base"
                    else:
                        # Otherwise use the pale parent class as its type.
                        pale_parent_class = field.__mro__[1]
                        # At the root of the hierarchy, inherit from the
                        # RAML "object" type; otherwise from the named
                        # parent's value_type.
                        if pale_parent_class.__name__ == "object":
                            basic_types[type_name]["type"] = "object"
                        else:
                            basic_types[type_name]["type"] = pale_parent_class.value_type
    # Emit the types in deterministic (alphabetical) order.
    ordered_basic_types = OrderedDict(sorted(basic_types.items(),
                                             key=lambda t: t[0]))
    basic_docs = generate_type_docs(ordered_basic_types)
    return (basic_docs, basic_types)
7,826
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L215-L346
[ "def", "create", "(", "cls", ",", "destination", ")", ":", "mdb_gz_b64", "=", "\"\"\"\\\n H4sICIenn1gC/25ldzIwMDMubWRiAO2de2wcRx3Hf7O7Pt/d3u6eLyEtVaOaqg+EkjQvuVVDwa9a\n jWXHdZxQQlCJ7fOrfp3OTpqkhVxTItFWIhVQVFBRVNIKRaColVpAUKGKRwwFqUAhKiBIpUaoVWP+\n qKgIIHL8Znb39u72znWJiWP3+9l473fzm/nNY3cdf2fmbBJEPdO9E+nebLq+fWC6vrWZOImen9D7\n 9sR+vPPNE0PZxo/TE5879mj+yNc3/OzAD2bXv3DmV9/o/8PZnxxr+/fDL2w79ulzN7e+/sS/zvzz\n w3+N1z28p3PTfQ3nfn/m2YmeFS2no89uWnvqwO5HUvd/5Phr938tes3j/zm5+qT41J8/P/iZx87/\n +qHrjgyduubG1t/+7eWB2XztTNuT+1clZt9c2/e7HRGizevWEwAAAAAAAACAhUEIwvE+PoRIO8K7\n FzT6obPPwTMBAAAAAAAAAABcfpzPXwya+Ispo1xlEO2KEEX9eaGyWnrqyKQ60tQ0AcNZRcR1RYuy\n +XZCxoqRzmaMI6cKGRJuJVrIEZUOQ9UrHStUYpyzKkdNmSPFDkM6aguhXMdVHCMuHXE2Suu4IFQJ\n l6CErNWUDouDlbdKOZIcrKLD4S5WdNhqIEodqlVaofKgVTHpiBQ6uLG0uaKsuYbf3IS8BmV1qFAm\n j1Z5Hbp06GWDKC+DTS00SRN8DFA/TXNfW6mXX3upj7+mOHWllzLAObN8du0gdSdlKO3ZcWqjMbaH\n uOQqtidViRF+P0HbOH2c3xm0lfMb1EH7uHZ5vp32c+ks+5PqfSeXS9NejjTAvZQpd7J3kuuJFqLE\n qYvuVa3Ocqk7OVXWNMFxZPRVtJ1zSXuCBrlkh+rjEF1Zlt5Dw6qN0xx5Bx3gGgbowVo56EIjkc9T\n xX9Jdd+5PKDOD6q3VQvwv7qiZ8st419cdYHlo6iuriF8X4HA590AsodXhvrsj0yMDPnAuI+ZvOrq\n 1o7K51Hdy7a8cdXNm5AedbfG5W3j3lOybxFZKb6zAgAAAAAAsNzQxAlbvnYJV3VcUU3/S2luBIKF\n ha+IlWp+wxW4IiRXRSXxKeNU1eOxUuUbSOIINbEM7WT506ZE3LASgCOeYJWCMcnCsI/u8eSsFEYR\n lnlbWa6+u0jTYqSkvuQL9G5CLFwTRBMAAAAAAAAAgMtW/79lyVdLKxW7oqDF3bXOniib0UD/m/xq\n loWqvFwt3DX/mrLNALIu3V35NkpK1JDmL+2XOmr9pf1gKiFY4I672wc0mveaf6zaenyKmljPT6t5\n hT7a6y13y0XqjFpwneJjRC0oRwvL3eUL2fHCcuyGIntjhTkDuZCd5Vc5j+HNUMyx+myYcpHW5YG5\n ZijUdbg2VFu4ZzzcHFM3seQLAAAAAAAAAMtc//9S6cm1emX97ytK1v81rHelhtfVfAFnseZXRdV9\n Ad7+dhGS5kbl3eqe/K8pU/nnYwX5X2VeoLbCZwHi7txD6aTELabnoLJ5AfPFC8JmFd3Pun+MlfM4\n q/846/4s62i5+8Dmc7EvSVN0UG2tL00p1uPXqZTt/G5QqX+5lbufz+mSctVzFce6upBrTG3Fd+cn\n pmiYrUyw8+GNfL4hn8/k83qZrVlyGzgPeqbhjcOqx7KMEZRpU/MPQ+rsldEtuYm8vExkznoMS+6b\n KC5TZRt8wVf4xEkFX4V5D/X2vYz1/EcR8yMAAAAAAACAJY0Qf/d3vLPUlb//b4Nzzv6W3Wevtl+1\n vmxts2LWTxOHErcm3jGfMUfNG0yMGQAAAAAAeJ/8rLwAMXIYRgCARFv8IIaYtKpGqCdqlN/2kupD\n 
/ob67qXhsi0lDh2Vp6728faO9tHuUflfWJ1wE0e6724f35XuG71r16Dr0FwH573by6rKi0N7RveN\n tnd6aTVBWrpjd3fnuJtsBMnDk90ju7zckSA5XGGtdGrK2dWhUnRcMgAAAAAAAAD4v2CIV6vqf82I\n Jusbcwsy7wkWSf/n1JQNq/Oc+uQGq/ecmsphYZ6Tn6XwRLjwxb7mTxDoakLgURUFshwAAAAAAAAA\n ljpCrHZ8W/f2/2NUAAAAAAAAAAAAhXH5RLm4IIbotqot7hbW/0MGWCp46/+pgpHwjZS3IyAlfMPy\n tgakNN+wfcPxNgukdN9I+kadt30gZfhGjW+s8I2V3s6CVNTbWZCK+Eatb3zAN1Z5mw5SMd+I+wZ+\n +QQAAAAAAAAA/K8IcdT27Zqi3/+HkQEAAAAAAAAAsGgkMQQLjSHqbQPDAAAAAAAAAAAALGuw/g8A\n AAAAAAAA4DJUqwsQI7cQDWlcLiMq1/9rcGMBAAAAAAAAAADLGuh/AAAAAAAAAAAA+h8AAAAAAAAA\n AABLHyHusDTPjtLzTtoxnRftUftqe8YatDA+AAAAAAAAAPDeqJN/KVt+et0R9PYnzz7W8PrZRv+V\n HblO6qEDNEXbaYDGqJemaYQmaYJThtnK8Gvzb1opfDRTPZmUlxUY86qgm/ZyFVkOOqCC3kLhoyEI\n qs8raBO10O0q3EYKH+uDcNq8wnVRH93D7evnYZhHG5kkB3a0OYO2ctCWV9ZR+FhT0l2HCzl6xVBz\n XZyPUvi4taTjcwRuVUF7uYW9HMy9MJspfGwMAoo5A+5Qwca8UHN2WogeU/fu0ito1vmjM+M85zzp\n fNG5zxl2djrNzk3O9+0m+yWrx2q0fpH4buJ4Yk3ig4lvmkfxx9gBAAAAAAC4OAylQfJ5h5pfSVCc\n f853gqSmWPSZux6xjUznltH2HT/flNu7++0NZ7/07cg/vnPbVu30y6d/NLvlabPh+j81v/Xc5g9l\n 1h2f+epn9+VPdN90OHHvU50fm94y/ZXvWQ/tP/yJG/NH3llz8A79tlNPG72DHSePHdzz2s3XPzVj\n vzSUvSHjVys1Rv5CSUv8pEvcEqkbV/KX35JaQ+npikmRS9o4rtYIt8RYnJa4Ou6SV6stTm+l7rcX\n q9qSy+23pCVIcgV/SZKuJj5CSRc4Y/PpkiesLJcI53J37NvFuQzv4peGL0/SypP+C+45xVAAMAEA\n \"\"\"", "pristine", "=", "StringIO", "(", ")", "pristine", ".", "write", "(", "base64", ".", "b64decode", "(", "mdb_gz_b64", ")", ")", "pristine", ".", "seek", "(", "0", ")", "pristine", "=", "gzip", ".", "GzipFile", "(", "fileobj", "=", "pristine", ",", "mode", "=", "'rb'", ")", "with", "open", "(", "destination", ",", "'wb'", ")", "as", "handle", ":", "shutil", ".", "copyfileobj", "(", "pristine", ",", "handle", ")", "return", "cls", "(", "destination", ")" ]
Compile a Pale module s documentation into a python dictionary .
def generate_doc_dict(module, user):
    """Compile a Pale module's documentation into a python dictionary.

    Returns ``{'endpoints': ..., 'resources': ...}``. Endpoints that carry
    a ``requires_permission`` entry are only included when ``user`` is an
    admin. Raises ``ValueError`` if ``module`` is not a pale module.
    """
    from pale import extract_endpoints, extract_resources, is_pale_module
    if not is_pale_module(module):
        # Bug fix: interpolate the module into the message -- the %s
        # placeholder was previously left unformatted.
        raise ValueError("""The passed in `module` (%s) is not a pale module. `paledoc` only works on modules with a `_module_type` set to equal `pale.ImplementationModule`.""" % module)

    module_endpoints = extract_endpoints(module)
    ep_doc = {ep._route_name: document_endpoint(ep)
              for ep in module_endpoints}

    # Check if user has permission to view each endpoint. This is
    # currently an on/off switch: if any endpoint has a
    # "requires_permission" entry, user.is_admin must be True for the
    # user to see its documentation.
    # @TODO - make this permission more granular if necessary
    ep_doc_filtered = {}
    for endpoint in ep_doc:
        requires = ep_doc[endpoint].get("requires_permission")
        if requires is None or (user is not None and user.is_admin):
            ep_doc_filtered[endpoint] = ep_doc[endpoint]

    module_resources = extract_resources(module)
    res_doc = {r._value_type: document_resource(r)
               for r in module_resources}
    return {'endpoints': ep_doc_filtered, 'resources': res_doc}
7,827
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L781-L815
[ "def", "GetBestStringMatchValue", "(", "string1", ",", "string2", ")", ":", "# Ignore case", "string1", "=", "string1", ".", "lower", "(", ")", "string2", "=", "string2", ".", "lower", "(", ")", "# Ignore non-alphanumeric characters", "string1", "=", "''", ".", "join", "(", "i", "for", "i", "in", "string1", "if", "i", ".", "isalnum", "(", ")", ")", "string2", "=", "''", ".", "join", "(", "i", "for", "i", "in", "string2", "if", "i", ".", "isalnum", "(", ")", ")", "# Finding best match value between string1 and string2", "if", "len", "(", "string1", ")", "==", "0", "or", "len", "(", "string2", ")", "==", "0", ":", "bestRatio", "=", "0", "elif", "len", "(", "string1", ")", "==", "len", "(", "string2", ")", ":", "match", "=", "difflib", ".", "SequenceMatcher", "(", "None", ",", "string1", ",", "string2", ")", "bestRatio", "=", "match", ".", "ratio", "(", ")", "else", ":", "if", "len", "(", "string1", ")", ">", "len", "(", "string2", ")", ":", "shortString", "=", "string2", "longString", "=", "string1", "else", ":", "shortString", "=", "string1", "longString", "=", "string2", "match", "=", "difflib", ".", "SequenceMatcher", "(", "None", ",", "shortString", ",", "longString", ")", "bestRatio", "=", "match", ".", "ratio", "(", ")", "for", "block", "in", "match", ".", "get_matching_blocks", "(", ")", ":", "subString", "=", "longString", "[", "block", "[", "1", "]", ":", "block", "[", "1", "]", "+", "block", "[", "2", "]", "]", "subMatch", "=", "difflib", ".", "SequenceMatcher", "(", "None", ",", "shortString", ",", "subString", ")", "if", "(", "subMatch", ".", "ratio", "(", ")", ">", "bestRatio", ")", ":", "bestRatio", "=", "subMatch", ".", "ratio", "(", ")", "return", "(", "bestRatio", ")" ]
Extract the full documentation dictionary from the endpoint .
def document_endpoint(endpoint):
    """Extract the full documentation dictionary from the endpoint.

    Always includes name, HTTP method, URI, description, arguments and
    return documentation; "success" and "requires_permission" are added
    only when the endpoint defines the corresponding private attribute.
    """
    summary = clean_description(py_doc_trim(endpoint.__doc__))
    docs = {
        'name': endpoint._route_name,
        'http_method': endpoint._http_method,
        'uri': endpoint._uri,
        'description': summary,
        'arguments': extract_endpoint_arguments(endpoint),
        'returns': format_endpoint_returns_doc(endpoint),
    }
    # Optional attributes are exposed under their public (unprefixed) key.
    for attr in ("_success", "_requires_permission"):
        if hasattr(endpoint, attr):
            docs[attr.lstrip("_")] = getattr(endpoint, attr)
    return docs
7,828
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L818-L833
[ "def", "checkIsConsistent", "(", "self", ")", ":", "if", "is_an_array", "(", "self", ".", "mask", ")", "and", "self", ".", "mask", ".", "shape", "!=", "self", ".", "data", ".", "shape", ":", "raise", "ConsistencyError", "(", "\"Shape mismatch mask={}, data={}\"", ".", "format", "(", "self", ".", "mask", ".", "shape", "!=", "self", ".", "data", ".", "shape", ")", ")" ]
Extract the argument documentation from the endpoint .
def extract_endpoint_arguments(endpoint):
    """Extract the argument documentation from the endpoint.

    Returns ``None`` when the endpoint declares no arguments, otherwise a
    dict mapping each argument name to its formatted documentation.
    """
    ep_args = endpoint._arguments
    if ep_args is None:
        return None
    # .items() works on both Python 2 and 3; .iteritems() is 2-only.
    arg_docs = {k: format_endpoint_argument_doc(a)
                for k, a in ep_args.items()}
    return arg_docs
7,829
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L836-L845
[ "def", "restore", "(", "self", ")", ":", "clean_beam", ",", "beam_params", "=", "beam_fit", "(", "self", ".", "psf_data", ",", "self", ".", "cdelt1", ",", "self", ".", "cdelt2", ")", "if", "np", ".", "all", "(", "np", ".", "array", "(", "self", ".", "psf_data_shape", ")", "==", "2", "*", "np", ".", "array", "(", "self", ".", "dirty_data_shape", ")", ")", ":", "self", ".", "restored", "=", "np", ".", "fft", ".", "fftshift", "(", "np", ".", "fft", ".", "irfft2", "(", "np", ".", "fft", ".", "rfft2", "(", "conv", ".", "pad_array", "(", "self", ".", "model", ")", ")", "*", "np", ".", "fft", ".", "rfft2", "(", "clean_beam", ")", ")", ")", "self", ".", "restored", "=", "self", ".", "restored", "[", "self", ".", "dirty_data_shape", "[", "0", "]", "/", "2", ":", "-", "self", ".", "dirty_data_shape", "[", "0", "]", "/", "2", ",", "self", ".", "dirty_data_shape", "[", "1", "]", "/", "2", ":", "-", "self", ".", "dirty_data_shape", "[", "1", "]", "/", "2", "]", "else", ":", "self", ".", "restored", "=", "np", ".", "fft", ".", "fftshift", "(", "np", ".", "fft", ".", "irfft2", "(", "np", ".", "fft", ".", "rfft2", "(", "self", ".", "model", ")", "*", "np", ".", "fft", ".", "rfft2", "(", "clean_beam", ")", ")", ")", "self", ".", "restored", "+=", "self", ".", "residual", "self", ".", "restored", "=", "self", ".", "restored", ".", "astype", "(", "np", ".", "float32", ")", "return", "beam_params" ]
Return documentation about the argument that an endpoint accepts .
def format_endpoint_argument_doc(argument):
    """Return documentation about the argument that an endpoint accepts.

    Starts from the argument's own ``doc_dict()`` and trims the
    description strings (and the optional detailed description) in place.
    """
    doc = argument.doc_dict()
    # Trim the strings a bit.
    doc['description'] = clean_description(py_doc_trim(doc['description']))
    if doc.get('detailed_description') is not None:
        doc['detailed_description'] = clean_description(
            py_doc_trim(doc['detailed_description']))
    return doc
7,830
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L848-L858
[ "def", "write_result_stream", "(", "result_stream", ",", "filename_prefix", "=", "None", ",", "results_per_file", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "result_stream", ",", "types", ".", "GeneratorType", ")", ":", "stream", "=", "result_stream", "else", ":", "stream", "=", "result_stream", ".", "stream", "(", ")", "file_time_formatter", "=", "\"%Y-%m-%dT%H_%M_%S\"", "if", "filename_prefix", "is", "None", ":", "filename_prefix", "=", "\"twitter_search_results\"", "if", "results_per_file", ":", "logger", ".", "info", "(", "\"chunking result stream to files with {} tweets per file\"", ".", "format", "(", "results_per_file", ")", ")", "chunked_stream", "=", "partition", "(", "stream", ",", "results_per_file", ",", "pad_none", "=", "True", ")", "for", "chunk", "in", "chunked_stream", ":", "chunk", "=", "filter", "(", "lambda", "x", ":", "x", "is", "not", "None", ",", "chunk", ")", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}_{}.json\"", ".", "format", "(", "filename_prefix", ",", "curr_datetime", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "chunk", ")", "else", ":", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}.json\"", ".", "format", "(", "filename_prefix", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "stream", ")" ]
Return documentation about the resource that an endpoint returns .
def format_endpoint_returns_doc ( endpoint ) : description = clean_description ( py_doc_trim ( endpoint . _returns . _description ) ) return { 'description' : description , 'resource_name' : endpoint . _returns . _value_type , 'resource_type' : endpoint . _returns . __class__ . __name__ }
7,831
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/doc.py#L861-L868
[ "async", "def", "update_lease_async", "(", "self", ",", "lease", ")", ":", "if", "lease", "is", "None", ":", "return", "False", "if", "not", "lease", ".", "token", ":", "return", "False", "_logger", ".", "debug", "(", "\"Updating lease %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ")", "# First, renew the lease to make sure the update will go through.", "if", "await", "self", ".", "renew_lease_async", "(", "lease", ")", ":", "try", ":", "await", "self", ".", "host", ".", "loop", ".", "run_in_executor", "(", "self", ".", "executor", ",", "functools", ".", "partial", "(", "self", ".", "storage_client", ".", "create_blob_from_text", ",", "self", ".", "lease_container_name", ",", "lease", ".", "partition_id", ",", "json", ".", "dumps", "(", "lease", ".", "serializable", "(", ")", ")", ",", "lease_id", "=", "lease", ".", "token", ")", ")", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "_logger", ".", "error", "(", "\"Failed to update lease %r %r %r\"", ",", "self", ".", "host", ".", "guid", ",", "lease", ".", "partition_id", ",", "err", ")", "raise", "err", "else", ":", "return", "False", "return", "True" ]
Create formatted version of body text .
def save ( self , * args , * * kwargs ) : self . body_formatted = sanetize_text ( self . body ) super ( Contact , self ) . save ( )
7,832
https://github.com/tBaxter/tango-contact-manager/blob/7bd5be326a8db8f438cdefff0fbd14849d0474a5/build/lib/contact_manager/models.py#L239-L244
[ "def", "reconnect_redis", "(", "self", ")", ":", "if", "self", ".", "shared_client", "and", "Storage", ".", "storage", ":", "return", "Storage", ".", "storage", "storage", "=", "Redis", "(", "port", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_PORT", ",", "host", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_HOST", ",", "db", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_DB", ",", "password", "=", "self", ".", "context", ".", "config", ".", "REDIS_RESULT_STORAGE_SERVER_PASSWORD", ")", "if", "self", ".", "shared_client", ":", "Storage", ".", "storage", "=", "storage", "return", "storage" ]
Return a current membership for this org or None if there is none .
def get_current_membership_for_org ( self , account_num , verbose = False ) : all_memberships = self . get_memberships_for_org ( account_num = account_num , verbose = verbose ) # Look for first membership that hasn't expired yet. for membership in all_memberships : if ( membership . expiration_date and membership . expiration_date > datetime . datetime . now ( ) ) : # noqa return membership # noqa return None
7,833
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/memberships/services.py#L22-L34
[ "def", "guess_type", "(", "filename", ",", "*", "*", "kwargs", ")", ":", "extension", "=", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "1", "]", "case", "=", "{", "'.xls'", ":", "Xls", ",", "'.xlsx'", ":", "Xlsx", ",", "'.csv'", ":", "Csv", "}", "if", "extension", "and", "case", ".", "get", "(", "extension", ".", "lower", "(", ")", ")", ":", "low_extension", "=", "extension", ".", "lower", "(", ")", "new_kwargs", "=", "dict", "(", ")", "class_name", "=", "case", ".", "get", "(", "low_extension", ")", "class_kwargs", "=", "inspect", ".", "getargspec", "(", "class_name", ".", "__init__", ")", ".", "args", "[", "1", ":", "]", "for", "kwarg", "in", "kwargs", ":", "if", "kwarg", "in", "class_kwargs", ":", "new_kwargs", "[", "kwarg", "]", "=", "kwargs", "[", "kwarg", "]", "return", "case", ".", "get", "(", "low_extension", ")", "(", "filename", ",", "*", "*", "new_kwargs", ")", "else", ":", "raise", "Exception", "(", "'No extension found'", ")" ]
Retrieve all memberships associated with an organization ordered by expiration date .
def get_memberships_for_org ( self , account_num , verbose = False ) : if not self . client . session_id : self . client . request_session ( ) query = "SELECT Objects() FROM Membership " "WHERE Owner = '%s' ORDER BY ExpirationDate" % account_num membership_list = self . get_long_query ( query , verbose = verbose ) return membership_list or [ ]
7,834
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/memberships/services.py#L36-L48
[ "def", "asset", "(", "self", ",", "asset_id", ",", "asset_type", ",", "action", "=", "'GET'", ")", ":", "if", "not", "self", ".", "can_update", "(", ")", ":", "self", ".", "_tcex", ".", "handle_error", "(", "910", ",", "[", "self", ".", "type", "]", ")", "if", "asset_type", "==", "'PHONE'", ":", "return", "self", ".", "tc_requests", ".", "victim_phone_asset", "(", "self", ".", "api_type", ",", "self", ".", "api_sub_type", ",", "self", ".", "unique_id", ",", "asset_id", ",", "action", "=", "action", ")", "if", "asset_type", "==", "'EMAIL'", ":", "return", "self", ".", "tc_requests", ".", "victim_email_asset", "(", "self", ".", "api_type", ",", "self", ".", "api_sub_type", ",", "self", ".", "unique_id", ",", "asset_id", ",", "action", "=", "action", ")", "if", "asset_type", "==", "'NETWORK'", ":", "return", "self", ".", "tc_requests", ".", "victim_network_asset", "(", "self", ".", "api_type", ",", "self", ".", "api_sub_type", ",", "self", ".", "unique_id", ",", "asset_id", ",", "action", "=", "action", ")", "if", "asset_type", "==", "'SOCIAL'", ":", "return", "self", ".", "tc_requests", ".", "victim_social_asset", "(", "self", ".", "api_type", ",", "self", ".", "api_sub_type", ",", "self", ".", "unique_id", ",", "asset_id", ",", "action", "=", "action", ")", "if", "asset_type", "==", "'WEB'", ":", "return", "self", ".", "tc_requests", ".", "victim_web_asset", "(", "self", ".", "api_type", ",", "self", ".", "api_sub_type", ",", "self", ".", "unique_id", ",", "asset_id", ",", "action", "=", "action", ")", "self", ".", "_tcex", ".", "handle_error", "(", "925", ",", "[", "'asset_type'", ",", "'asset'", ",", "'asset_type'", ",", "'asset_type'", ",", "asset_type", "]", ")", "return", "None" ]
Retrieve all memberships updated since since_when
def get_all_memberships ( self , limit_to = 100 , max_calls = None , parameters = None , since_when = None , start_record = 0 , verbose = False ) : if not self . client . session_id : self . client . request_session ( ) query = "SELECT Objects() FROM Membership" # collect all where parameters into a list of # (key, operator, value) tuples where_params = [ ] if parameters : for k , v in parameters . items ( ) : where_params . append ( ( k , "=" , v ) ) if since_when : d = datetime . date . today ( ) - datetime . timedelta ( days = since_when ) where_params . append ( ( 'LastModifiedDate' , ">" , "'%s 00:00:00'" % d ) ) if where_params : query += " WHERE " query += " AND " . join ( [ "%s %s %s" % ( p [ 0 ] , p [ 1 ] , p [ 2 ] ) for p in where_params ] ) query += " ORDER BY LocalID" # note, get_long_query is overkill when just looking at # one org, but it still only executes once # `get_long_query` uses `ms_object_to_model` to return Organizations membership_list = self . get_long_query ( query , limit_to = limit_to , max_calls = max_calls , start_record = start_record , verbose = verbose ) return membership_list or [ ]
7,835
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/memberships/services.py#L50-L92
[ "def", "tearpage_backend", "(", "filename", ",", "teared_pages", "=", "None", ")", ":", "# Handle default argument", "if", "teared_pages", "is", "None", ":", "teared_pages", "=", "[", "0", "]", "# Copy the pdf to a tmp file", "with", "tempfile", ".", "NamedTemporaryFile", "(", ")", "as", "tmp", ":", "# Copy the input file to tmp", "shutil", ".", "copy", "(", "filename", ",", "tmp", ".", "name", ")", "# Read the copied pdf", "# TODO: Use with syntax", "try", ":", "input_file", "=", "PdfFileReader", "(", "open", "(", "tmp", ".", "name", ",", "'rb'", ")", ")", "except", "PdfReadError", ":", "fix_pdf", "(", "filename", ",", "tmp", ".", "name", ")", "input_file", "=", "PdfFileReader", "(", "open", "(", "tmp", ".", "name", ",", "'rb'", ")", ")", "# Seek for the number of pages", "num_pages", "=", "input_file", ".", "getNumPages", "(", ")", "# Write pages excepted the first one", "output_file", "=", "PdfFileWriter", "(", ")", "for", "i", "in", "range", "(", "num_pages", ")", ":", "if", "i", "in", "teared_pages", ":", "continue", "output_file", ".", "addPage", "(", "input_file", ".", "getPage", "(", "i", ")", ")", "tmp", ".", "close", "(", ")", "outputStream", "=", "open", "(", "filename", ",", "\"wb\"", ")", "output_file", ".", "write", "(", "outputStream", ")" ]
Retrieves membership product objects
def get_all_membership_products ( self , verbose = False ) : if not self . client . session_id : self . client . request_session ( ) query = "SELECT Objects() FROM MembershipDuesProduct" membership_product_list = self . get_long_query ( query , verbose = verbose ) return membership_product_list or [ ]
7,836
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/memberships/services.py#L109-L119
[ "def", "load_toml_rest_api_config", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "LOGGER", ".", "info", "(", "\"Skipping rest api loading from non-existent config file: %s\"", ",", "filename", ")", "return", "RestApiConfig", "(", ")", "LOGGER", ".", "info", "(", "\"Loading rest api information from config: %s\"", ",", "filename", ")", "try", ":", "with", "open", "(", "filename", ")", "as", "fd", ":", "raw_config", "=", "fd", ".", "read", "(", ")", "except", "IOError", "as", "e", ":", "raise", "RestApiConfigurationError", "(", "\"Unable to load rest api configuration file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "toml_config", "=", "toml", ".", "loads", "(", "raw_config", ")", "invalid_keys", "=", "set", "(", "toml_config", ".", "keys", "(", ")", ")", ".", "difference", "(", "[", "'bind'", ",", "'connect'", ",", "'timeout'", ",", "'opentsdb_db'", ",", "'opentsdb_url'", ",", "'opentsdb_username'", ",", "'opentsdb_password'", ",", "'client_max_size'", "]", ")", "if", "invalid_keys", ":", "raise", "RestApiConfigurationError", "(", "\"Invalid keys in rest api config: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted", "(", "list", "(", "invalid_keys", ")", ")", ")", ")", ")", "config", "=", "RestApiConfig", "(", "bind", "=", "toml_config", ".", "get", "(", "\"bind\"", ",", "None", ")", ",", "connect", "=", "toml_config", ".", "get", "(", "'connect'", ",", "None", ")", ",", "timeout", "=", "toml_config", ".", "get", "(", "'timeout'", ",", "None", ")", ",", "opentsdb_url", "=", "toml_config", ".", "get", "(", "'opentsdb_url'", ",", "None", ")", ",", "opentsdb_db", "=", "toml_config", ".", "get", "(", "'opentsdb_db'", ",", "None", ")", ",", "opentsdb_username", "=", "toml_config", ".", "get", "(", "'opentsdb_username'", ",", "None", ")", ",", "opentsdb_password", "=", "toml_config", ".", "get", "(", "'opentsdb_password'", ",", "None", ")", ",", "client_max_size", "=", "toml_config", 
".", "get", "(", "'client_max_size'", ",", "None", ")", ")", "return", "config" ]
Creates a Selenium driver on the basis of the configuration file upon which this object was created .
def get_driver ( self , desired_capabilities = None ) : override_caps = desired_capabilities or { } desired_capabilities = self . config . make_selenium_desired_capabilities ( ) desired_capabilities . update ( override_caps ) browser_string = self . config . browser chromedriver_version = None if self . remote : driver = self . remote_service . build_driver ( desired_capabilities ) # There is no equivalent for BrowserStack. if browser_string == "CHROME" and self . remote_service . name == "saucelabs" : chromedriver_version = desired_capabilities . get ( "chromedriver-version" , None ) if chromedriver_version is None : raise ValueError ( "when using Chrome, you must set a " "``chromedriver-version`` capability so that Selenic " "can detect which version of Chromedriver will " "be used." ) else : if browser_string == "CHROME" : chromedriver_path = self . local_conf [ "CHROMEDRIVER_PATH" ] driver = webdriver . Chrome ( chromedriver_path , chrome_options = self . local_conf . get ( "CHROME_OPTIONS" ) , desired_capabilities = desired_capabilities , service_log_path = self . local_conf [ "SERVICE_LOG_PATH" ] , service_args = self . local_conf . get ( "SERVICE_ARGS" ) ) version_line = subprocess . check_output ( [ chromedriver_path , "--version" ] ) version_str = re . match ( ur"^ChromeDriver (\d+\.\d+)" , version_line ) . group ( 1 ) chromedriver_version = StrictVersion ( version_str ) elif browser_string == "FIREFOX" : profile = self . local_conf . get ( "FIREFOX_PROFILE" ) or FirefoxProfile ( ) binary = self . local_conf . get ( "FIREFOX_BINARY" ) or FirefoxBinary ( ) driver = webdriver . Firefox ( profile , binary , capabilities = desired_capabilities ) elif browser_string == "INTERNETEXPLORER" : driver = webdriver . Ie ( ) elif browser_string == "OPERA" : driver = webdriver . 
Opera ( ) else : # SAFARI # HTMLUNIT # HTMLUNITWITHJS # IPHONE # IPAD # ANDROID # PHANTOMJS raise ValueError ( "can't start a local " + browser_string ) # Check that what we get is what the config wanted... driver_caps = NormalizedCapabilities ( driver . desired_capabilities ) browser_version = re . sub ( r"\..*$" , "" , driver_caps [ "browserVersion" ] ) if driver_caps [ "platformName" ] . upper ( ) != self . config . platform : raise ValueError ( "the platform you want is not the one " "you are running selenic on" ) if browser_version != self . config . version : raise ValueError ( "the version installed is not the one " "you wanted" ) # On BrowserStack we cannot set the version of chromedriver or # query it. So we make the reasonable assuption that the # version of chromedriver is greater than 2.13. (There have # been at least 7 releases after 2.13 at the time of writing.) if ( self . remote_service and self . remote_service . name == "browserstack" ) or ( chromedriver_version is not None and chromedriver_version > StrictVersion ( "2.13" ) ) : # We patch ActionChains. chromedriver_element_center_patch ( ) # We need to mark the driver as needing the patch. setattr ( driver , CHROMEDRIVER_ELEMENT_CENTER_PATCH_FLAG , True ) driver = self . patch ( driver ) return driver
7,837
https://github.com/mangalam-research/selenic/blob/2284c68e15fa3d34b88aa2eec1a2e8ecd37f44ad/selenic/builder.py#L73-L171
[ "async", "def", "incoming", "(", "self", ")", ":", "msg", "=", "await", "self", ".", "_queue", ".", "get", "(", ")", "self", ".", "_queue", ".", "task_done", "(", ")", "return", "msg" ]
If a FIREFOX_BINARY was specified this method updates an environment variable used by the FirefoxBinary instance to the current value of the variable in the environment .
def update_ff_binary_env ( self , variable ) : if self . config . browser != 'FIREFOX' : return binary = self . local_conf . get ( 'FIREFOX_BINARY' ) if binary is None : return # pylint: disable=protected-access binary . _firefox_env [ variable ] = os . environ [ variable ]
7,838
https://github.com/mangalam-research/selenic/blob/2284c68e15fa3d34b88aa2eec1a2e8ecd37f44ad/selenic/builder.py#L173-L200
[ "def", "rejection_sample", "(", "n_samples", ",", "pool_size", ",", "rng_state", ")", ":", "result", "=", "np", ".", "empty", "(", "n_samples", ",", "dtype", "=", "np", ".", "int64", ")", "for", "i", "in", "range", "(", "n_samples", ")", ":", "reject_sample", "=", "True", "while", "reject_sample", ":", "j", "=", "tau_rand_int", "(", "rng_state", ")", "%", "pool_size", "for", "k", "in", "range", "(", "i", ")", ":", "if", "j", "==", "result", "[", "k", "]", ":", "break", "else", ":", "reject_sample", "=", "False", "result", "[", "i", "]", "=", "j", "return", "result" ]
URL Validation regex Based on regular expression by Diego Perini (
def regex ( self , protocols , localhost = True ) : p = r"^" # protocol p += r"(?:(?:(?:{}):)?//)" . format ( '|' . join ( protocols ) ) # basic auth (optional) p += r"(?:\S+(?::\S*)?@)?" p += r"(?:" # ip exclusion: private and local networks p += r"(?!(?:10|127)(?:\.\d{1,3}){3})" p += r"(?!(?:169\.254|192\.168)(?:\.\d{1,3}){2})" p += r"(?!172\.(?:1[6-9]|2\d|3[0-1])(?:\.\d{1,3}){2})" # ip excluding loopback (0.0.0.0), reserved space (244.0.0.0) # and network/broadcast addresses p += r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" p += r"(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}" p += r"(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))" p += r"|" # hostname p += r"(?:" p += r"(?:" p += r"[a-z0-9\u00a1-\uffff][a-z0-9\u00a1-\uffff_-]{0,62})?" p += r"[a-z0-9\u00a1-\uffff]" p += r"\." if not localhost else r"[\.]?|localhost" p += r")+" # tld p += r"(?:[a-z\u00a1-\uffff]{2,}\.?)" p += r")" # port (optional) p += r"(?::\d{2,5})?" # path (optional) p += r"(?:[/?#]\S*)?" p += r"$" return p
7,839
https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/validators/url.py#L68-L118
[ "def", "start", "(", "self", ")", ":", "zones", "=", "[", "{", "\"id\"", ":", "data", "[", "0", "]", ",", "\"duration\"", ":", "data", "[", "1", "]", ",", "\"sortOrder\"", ":", "count", "}", "for", "(", "count", ",", "data", ")", "in", "enumerate", "(", "self", ".", "_zones", ",", "1", ")", "]", "self", ".", "_api", ".", "startMultiple", "(", "zones", ")" ]
Append given layers to this onion
def add_layers ( self , * layers ) : for layer in layers : if layer . name ( ) in self . __layers . keys ( ) : raise ValueError ( 'Layer "%s" already exists' % layer . name ( ) ) self . __layers [ layer . name ( ) ] = layer
7,840
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/messenger/onion.py#L86-L95
[ "def", "process_config", "(", "config", ",", "config_data", ")", ":", "if", "'components'", "in", "config_data", ":", "process_components_config_section", "(", "config", ",", "config_data", "[", "'components'", "]", ")", "if", "'data'", "in", "config_data", ":", "process_data_config_section", "(", "config", ",", "config_data", "[", "'data'", "]", ")", "if", "'log'", "in", "config_data", ":", "process_log_config_section", "(", "config", ",", "config_data", "[", "'log'", "]", ")", "if", "'management'", "in", "config_data", ":", "process_management_config_section", "(", "config", ",", "config_data", "[", "'management'", "]", ")", "if", "'session'", "in", "config_data", ":", "process_session_config_section", "(", "config", ",", "config_data", "[", "'session'", "]", ")" ]
Generate a mapping of a list of objects indexed by the given attr .
def index ( objects , attr ) : with warnings . catch_warnings ( ) : warnings . simplefilter ( "ignore" ) return { getattr ( obj , attr ) : obj for obj in objects }
7,841
https://github.com/numberoverzero/declare/blob/1b05ceca91fbdc3e8e770a376c2f070365c425ff/declare.py#L428-L467
[ "def", "is_valid_image", "(", "self", ",", "raw_data", ")", ":", "buf", "=", "StringIO", "(", "raw_data", ")", "try", ":", "trial_image", "=", "Image", ".", "open", "(", "buf", ")", "trial_image", ".", "verify", "(", ")", "except", "Exception", ":", "# TODO: Get more specific with this exception handling.", "return", "False", "return", "True" ]
Add the typedef to this engine if it is compatible .
def register ( self , typedef ) : if typedef in self . bound_types : return if not self . is_compatible ( typedef ) : raise ValueError ( "Incompatible type {} for engine {}" . format ( typedef , self ) ) if typedef not in self . unbound_types : self . unbound_types . add ( typedef ) typedef . _register ( self )
7,842
https://github.com/numberoverzero/declare/blob/1b05ceca91fbdc3e8e770a376c2f070365c425ff/declare.py#L73-L103
[ "def", "stop_experiment", "(", "args", ")", ":", "experiment_id_list", "=", "parse_ids", "(", "args", ")", "if", "experiment_id_list", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "for", "experiment_id", "in", "experiment_id_list", ":", "print_normal", "(", "'Stoping experiment %s'", "%", "experiment_id", ")", "nni_config", "=", "Config", "(", "experiment_dict", "[", "experiment_id", "]", "[", "'fileName'", "]", ")", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "rest_pid", ":", "kill_command", "(", "rest_pid", ")", "tensorboard_pid_list", "=", "nni_config", ".", "get_config", "(", "'tensorboardPidList'", ")", "if", "tensorboard_pid_list", ":", "for", "tensorboard_pid", "in", "tensorboard_pid_list", ":", "try", ":", "kill_command", "(", "tensorboard_pid", ")", "except", "Exception", "as", "exception", ":", "print_error", "(", "exception", ")", "nni_config", ".", "set_config", "(", "'tensorboardPidList'", ",", "[", "]", ")", "print_normal", "(", "'Stop experiment success!'", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'status'", ",", "'STOPPED'", ")", "time_now", "=", "time", ".", "strftime", "(", "'%Y-%m-%d %H:%M:%S'", ",", "time", ".", "localtime", "(", "time", ".", "time", "(", ")", ")", ")", "experiment_config", ".", "update_experiment", "(", "experiment_id", ",", "'endTime'", ",", "str", "(", "time_now", ")", ")" ]
Bind all unbound types to the engine .
def bind ( self , * * config ) : while self . unbound_types : typedef = self . unbound_types . pop ( ) try : load , dump = typedef . bind ( self , * * config ) self . bound_types [ typedef ] = { "load" : load , "dump" : dump } except Exception : self . unbound_types . add ( typedef ) raise
7,843
https://github.com/numberoverzero/declare/blob/1b05ceca91fbdc3e8e770a376c2f070365c425ff/declare.py#L105-L132
[ "def", "alter_retention_policy", "(", "self", ",", "name", ",", "database", "=", "None", ",", "duration", "=", "None", ",", "replication", "=", "None", ",", "default", "=", "None", ",", "shard_duration", "=", "None", ")", ":", "query_string", "=", "(", "\"ALTER RETENTION POLICY {0} ON {1}\"", ")", ".", "format", "(", "quote_ident", "(", "name", ")", ",", "quote_ident", "(", "database", "or", "self", ".", "_database", ")", ",", "shard_duration", ")", "if", "duration", ":", "query_string", "+=", "\" DURATION {0}\"", ".", "format", "(", "duration", ")", "if", "shard_duration", ":", "query_string", "+=", "\" SHARD DURATION {0}\"", ".", "format", "(", "shard_duration", ")", "if", "replication", ":", "query_string", "+=", "\" REPLICATION {0}\"", ".", "format", "(", "replication", ")", "if", "default", "is", "True", ":", "query_string", "+=", "\" DEFAULT\"", "self", ".", "query", "(", "query_string", ",", "method", "=", "\"POST\"", ")" ]
Return the result of the bound load method for a typedef
def load ( self , typedef , value , * * kwargs ) : try : bound_type = self . bound_types [ typedef ] except KeyError : raise DeclareException ( "Can't load unknown type {}" . format ( typedef ) ) else : # Don't need to try/catch since load/dump are bound together return bound_type [ "load" ] ( value , * * kwargs )
7,844
https://github.com/numberoverzero/declare/blob/1b05ceca91fbdc3e8e770a376c2f070365c425ff/declare.py#L134-L188
[ "def", "update_experiment", "(", ")", ":", "experiment_config", "=", "Experiments", "(", ")", "experiment_dict", "=", "experiment_config", ".", "get_all_experiments", "(", ")", "if", "not", "experiment_dict", ":", "return", "None", "for", "key", "in", "experiment_dict", ".", "keys", "(", ")", ":", "if", "isinstance", "(", "experiment_dict", "[", "key", "]", ",", "dict", ")", ":", "if", "experiment_dict", "[", "key", "]", ".", "get", "(", "'status'", ")", "!=", "'STOPPED'", ":", "nni_config", "=", "Config", "(", "experiment_dict", "[", "key", "]", "[", "'fileName'", "]", ")", "rest_pid", "=", "nni_config", ".", "get_config", "(", "'restServerPid'", ")", "if", "not", "detect_process", "(", "rest_pid", ")", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "'STOPPED'", ")", "continue", "rest_port", "=", "nni_config", ".", "get_config", "(", "'restServerPort'", ")", "startTime", ",", "endTime", "=", "get_experiment_time", "(", "rest_port", ")", "if", "startTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'startTime'", ",", "startTime", ")", "if", "endTime", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'endTime'", ",", "endTime", ")", "status", "=", "get_experiment_status", "(", "rest_port", ")", "if", "status", ":", "experiment_config", ".", "update_experiment", "(", "key", ",", "'status'", ",", "status", ")" ]
Return the string representing the configuration directory .
def get_configdir ( name ) : configdir = os . environ . get ( '%sCONFIGDIR' % name . upper ( ) ) if configdir is not None : return os . path . abspath ( configdir ) p = None h = _get_home ( ) if ( ( sys . platform . startswith ( 'linux' ) or sys . platform . startswith ( 'darwin' ) ) and h is not None ) : p = os . path . join ( h , '.config/' + name ) elif h is not None : p = os . path . join ( h , '.' + name ) if not os . path . exists ( p ) : os . makedirs ( p ) return p
7,845
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L45-L83
[ "def", "get_records", "(", "self", ")", ":", "form", "=", "self", ".", "request", ".", "form", "ar_count", "=", "self", ".", "get_ar_count", "(", ")", "records", "=", "[", "]", "# Group belonging AR fields together", "for", "arnum", "in", "range", "(", "ar_count", ")", ":", "record", "=", "{", "}", "s1", "=", "\"-{}\"", ".", "format", "(", "arnum", ")", "keys", "=", "filter", "(", "lambda", "key", ":", "s1", "in", "key", ",", "form", ".", "keys", "(", ")", ")", "for", "key", "in", "keys", ":", "new_key", "=", "key", ".", "replace", "(", "s1", ",", "\"\"", ")", "value", "=", "form", ".", "get", "(", "key", ")", "record", "[", "new_key", "]", "=", "value", "records", ".", "append", "(", "record", ")", "return", "records" ]
Dumps the stream from an OrderedDict . Taken from
def ordered_yaml_dump ( data , stream = None , Dumper = None , * * kwds ) : Dumper = Dumper or yaml . Dumper class OrderedDumper ( Dumper ) : pass def _dict_representer ( dumper , data ) : return dumper . represent_mapping ( yaml . resolver . BaseResolver . DEFAULT_MAPPING_TAG , data . items ( ) ) OrderedDumper . add_representer ( OrderedDict , _dict_representer ) return yaml . dump ( data , stream , OrderedDumper , * * kwds )
7,846
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L152-L168
[ "def", "clip_joint_velocities", "(", "self", ",", "velocities", ")", ":", "for", "i", "in", "range", "(", "len", "(", "velocities", ")", ")", ":", "if", "velocities", "[", "i", "]", ">=", "1.0", ":", "velocities", "[", "i", "]", "=", "1.0", "elif", "velocities", "[", "i", "]", "<=", "-", "1.0", ":", "velocities", "[", "i", "]", "=", "-", "1.0", "return", "velocities" ]
Load the file fname and make sure it can be done in parallel
def safe_load ( fname ) : lock = fasteners . InterProcessLock ( fname + '.lck' ) lock . acquire ( ) try : with open ( fname ) as f : return ordered_yaml_load ( f ) except : raise finally : lock . release ( )
7,847
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L171-L188
[ "def", "reset_subscriptions", "(", "self", ",", "accounts", "=", "[", "]", ",", "markets", "=", "[", "]", ",", "objects", "=", "[", "]", ")", ":", "self", ".", "websocket", ".", "reset_subscriptions", "(", "accounts", ",", "self", ".", "get_market_ids", "(", "markets", ")", ",", "objects", ")" ]
Savely dump d to fname using yaml
def safe_dump ( d , fname , * args , * * kwargs ) : if osp . exists ( fname ) : os . rename ( fname , fname + '~' ) lock = fasteners . InterProcessLock ( fname + '.lck' ) lock . acquire ( ) try : with open ( fname , 'w' ) as f : ordered_yaml_dump ( d , f , * args , * * kwargs ) except : raise finally : lock . release ( )
7,848
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L191-L220
[ "def", "validate", "(", "request_schema", "=", "None", ",", "response_schema", "=", "None", ")", ":", "def", "wrapper", "(", "func", ")", ":", "# Validating the schemas itself.", "# Die with exception if they aren't valid", "if", "request_schema", "is", "not", "None", ":", "_request_schema_validator", "=", "validator_for", "(", "request_schema", ")", "_request_schema_validator", ".", "check_schema", "(", "request_schema", ")", "if", "response_schema", "is", "not", "None", ":", "_response_schema_validator", "=", "validator_for", "(", "response_schema", ")", "_response_schema_validator", ".", "check_schema", "(", "response_schema", ")", "@", "asyncio", ".", "coroutine", "@", "functools", ".", "wraps", "(", "func", ")", "def", "wrapped", "(", "*", "args", ")", ":", "if", "asyncio", ".", "iscoroutinefunction", "(", "func", ")", ":", "coro", "=", "func", "else", ":", "coro", "=", "asyncio", ".", "coroutine", "(", "func", ")", "# Supports class based views see web.View", "if", "isinstance", "(", "args", "[", "0", "]", ",", "AbstractView", ")", ":", "class_based", "=", "True", "request", "=", "args", "[", "0", "]", ".", "request", "else", ":", "class_based", "=", "False", "request", "=", "args", "[", "-", "1", "]", "# Strictly expect json object here", "try", ":", "req_body", "=", "yield", "from", "request", ".", "json", "(", ")", "except", "(", "json", ".", "decoder", ".", "JSONDecodeError", ",", "TypeError", ")", ":", "_raise_exception", "(", "web", ".", "HTTPBadRequest", ",", "\"Request is malformed; could not decode JSON object.\"", ")", "# Validate request data against request schema (if given)", "if", "request_schema", "is", "not", "None", ":", "_validate_data", "(", "req_body", ",", "request_schema", ",", "_request_schema_validator", ")", "coro_args", "=", "req_body", ",", "request", "if", "class_based", ":", "coro_args", "=", "(", "args", "[", "0", "]", ",", ")", "+", "coro_args", "context", "=", "yield", "from", "coro", "(", "*", "coro_args", ")", "# No 
validation of response for websockets stream", "if", "isinstance", "(", "context", ",", "web", ".", "StreamResponse", ")", ":", "return", "context", "# Validate response data against response schema (if given)", "if", "response_schema", "is", "not", "None", ":", "_validate_data", "(", "context", ",", "response_schema", ",", "_response_schema_validator", ")", "try", ":", "return", "web", ".", "json_response", "(", "context", ")", "except", "(", "TypeError", ",", ")", ":", "_raise_exception", "(", "web", ".", "HTTPInternalServerError", ",", "\"Response is malformed; could not encode JSON object.\"", ")", "# Store schemas in wrapped handlers, so it later can be reused", "setattr", "(", "wrapped", ",", "\"_request_schema\"", ",", "request_schema", ")", "setattr", "(", "wrapped", ",", "\"_response_schema\"", ",", "response_schema", ")", "return", "wrapped", "return", "wrapper" ]
A mapping from project name to experiments
def project_map ( self ) : # first update with the experiments in the memory (the others should # already be loaded within the :attr:`exp_files` attribute) for key , val in self . items ( ) : if isinstance ( val , dict ) : l = self . _project_map [ val [ 'project' ] ] elif isinstance ( val , Archive ) : l = self . _project_map [ val . project ] else : continue if key not in l : l . append ( key ) return self . _project_map
7,849
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L260-L273
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_ptr", ":", "#try:", "# # Release inner BFD files in case we're an archive BFD.", "# if self.is_archive:", "# [inner_bfd.close() for inner_bfd in self.archive_files]", "#except TypeError, err:", "# pass", "try", ":", "_bfd", ".", "close", "(", "self", ".", "_ptr", ")", "except", "TypeError", ",", "err", ":", "raise", "BfdException", "(", "\"Unable to close bfd (%s)\"", "%", "err", ")", "finally", ":", "self", ".", "_ptr", "=", "None" ]
A mapping from experiment to experiment configuration file
def exp_files ( self ) : ret = OrderedDict ( ) # restore the order of the experiments exp_file = self . exp_file if osp . exists ( exp_file ) : for key , val in safe_load ( exp_file ) . items ( ) : ret [ key ] = val for project , d in self . projects . items ( ) : project_path = d [ 'root' ] config_path = osp . join ( project_path , '.project' ) if not osp . exists ( config_path ) : continue for fname in glob . glob ( osp . join ( config_path , '*.yml' ) ) : if fname == '.project.yml' : continue exp = osp . splitext ( osp . basename ( fname ) ) [ 0 ] if not isinstance ( ret . get ( exp ) , Archive ) : ret [ exp ] = osp . join ( config_path , exp + '.yml' ) if exp not in self . _project_map [ project ] : self . _project_map [ project ] . append ( exp ) return ret
7,850
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L276-L301
[ "def", "safeEncodeAttribute", "(", "self", ",", "encValue", ")", ":", "encValue", "=", "encValue", ".", "replace", "(", "u'&'", ",", "u'&amp;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'<'", ",", "u'&lt;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'>'", ",", "u'&gt;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'\"'", ",", "u'&quot;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'{'", ",", "u'&#123;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'['", ",", "u'&#91;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u\"''\"", ",", "u'&#39;&#39;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'ISBN'", ",", "u'&#73;SBN'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'RFC'", ",", "u'&#82;FC'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'PMID'", ",", "u'&#80;MID'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'|'", ",", "u'&#124;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'__'", ",", "u'&#95;_'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'\\n'", ",", "u'&#10;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'\\r'", ",", "u'&#13;'", ")", "encValue", "=", "encValue", ".", "replace", "(", "u'\\t'", ",", "u'&#9;'", ")", "return", "encValue" ]
Save the experiment configuration
def save ( self ) : for exp , d in dict ( self ) . items ( ) : if isinstance ( d , dict ) : project_path = self . projects [ d [ 'project' ] ] [ 'root' ] d = self . rel_paths ( copy . deepcopy ( d ) ) fname = osp . join ( project_path , '.project' , exp + '.yml' ) if not osp . exists ( osp . dirname ( fname ) ) : os . makedirs ( osp . dirname ( fname ) ) safe_dump ( d , fname , default_flow_style = False ) exp_file = self . exp_file # to be 100% sure we do not write to the file from multiple processes lock = fasteners . InterProcessLock ( exp_file + '.lck' ) lock . acquire ( ) safe_dump ( OrderedDict ( ( exp , val if isinstance ( val , Archive ) else None ) for exp , val in self . items ( ) ) , exp_file , default_flow_style = False ) lock . release ( )
7,851
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L447-L471
[ "def", "move_vobject", "(", "self", ",", "uid", ",", "from_file", ",", "to_file", ")", ":", "if", "from_file", "not", "in", "self", ".", "_reminders", "or", "to_file", "not", "in", "self", ".", "_reminders", ":", "return", "uid", "=", "uid", ".", "split", "(", "'@'", ")", "[", "0", "]", "with", "self", ".", "_lock", ":", "rem", "=", "open", "(", "from_file", ")", ".", "readlines", "(", ")", "for", "(", "index", ",", "line", ")", "in", "enumerate", "(", "rem", ")", ":", "if", "uid", "==", "md5", "(", "line", "[", ":", "-", "1", "]", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", ":", "del", "rem", "[", "index", "]", "open", "(", "from_file", ",", "'w'", ")", ".", "writelines", "(", "rem", ")", "open", "(", "to_file", ",", "'a'", ")", ".", "write", "(", "line", ")", "break" ]
Convenience method to convert this object into an OrderedDict
def as_ordereddict ( self ) : if six . PY2 : d = OrderedDict ( ) copied = dict ( self ) for key in self : d [ key ] = copied [ key ] else : d = OrderedDict ( self ) return d
7,852
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L480-L489
[ "def", "delete_files", "(", ")", ":", "session_token", "=", "request", ".", "headers", "[", "'session_token'", "]", "repository", "=", "request", ".", "headers", "[", "'repository'", "]", "#===", "current_user", "=", "have_authenticated_user", "(", "request", ".", "environ", "[", "'REMOTE_ADDR'", "]", ",", "repository", ",", "session_token", ")", "if", "current_user", "is", "False", ":", "return", "fail", "(", "user_auth_fail_msg", ")", "#===", "repository_path", "=", "config", "[", "'repositories'", "]", "[", "repository", "]", "[", "'path'", "]", "body_data", "=", "request", ".", "get_json", "(", ")", "def", "with_exclusive_lock", "(", ")", ":", "if", "not", "varify_user_lock", "(", "repository_path", ",", "session_token", ")", ":", "return", "fail", "(", "lock_fail_msg", ")", "try", ":", "data_store", "=", "versioned_storage", "(", "repository_path", ")", "if", "not", "data_store", ".", "have_active_commit", "(", ")", ":", "return", "fail", "(", "no_active_commit_msg", ")", "#-------------", "for", "fle", "in", "json", ".", "loads", "(", "body_data", "[", "'files'", "]", ")", ":", "data_store", ".", "fs_delete", "(", "fle", ")", "# updates the user lock expiry", "update_user_lock", "(", "repository_path", ",", "session_token", ")", "return", "success", "(", ")", "except", "Exception", ":", "return", "fail", "(", ")", "# pylint: disable=broad-except", "return", "lock_access", "(", "repository_path", ",", "with_exclusive_lock", ")" ]
Remove the configuration of an experiment
def remove ( self , experiment ) : try : project_path = self . projects [ self [ experiment ] [ 'project' ] ] [ 'root' ] except KeyError : return config_path = osp . join ( project_path , '.project' , experiment + '.yml' ) for f in [ config_path , config_path + '~' , config_path + '.lck' ] : if os . path . exists ( f ) : os . remove ( f ) del self [ experiment ]
7,853
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L519-L529
[ "def", "get_win32_screen_buffer_info", "(", "self", ")", ":", "# NOTE: We don't call the `GetConsoleScreenBufferInfo` API through", "# `self._winapi`. Doing so causes Python to crash on certain 64bit", "# Python versions. (Reproduced with 64bit Python 2.7.6, on Windows", "# 10). It is not clear why. Possibly, it has to do with passing", "# these objects as an argument, or through *args.", "# The Python documentation contains the following - possibly related - warning:", "# ctypes does not support passing unions or structures with", "# bit-fields to functions by value. While this may work on 32-bit", "# x86, it's not guaranteed by the library to work in the general", "# case. Unions and structures with bit-fields should always be", "# passed to functions by pointer.", "# Also see:", "# - https://github.com/ipython/ipython/issues/10070", "# - https://github.com/jonathanslenders/python-prompt-toolkit/issues/406", "# - https://github.com/jonathanslenders/python-prompt-toolkit/issues/86", "self", ".", "flush", "(", ")", "sbinfo", "=", "CONSOLE_SCREEN_BUFFER_INFO", "(", ")", "success", "=", "windll", ".", "kernel32", ".", "GetConsoleScreenBufferInfo", "(", "self", ".", "hconsole", ",", "byref", "(", "sbinfo", ")", ")", "# success = self._winapi(windll.kernel32.GetConsoleScreenBufferInfo,", "# self.hconsole, byref(sbinfo))", "if", "success", ":", "return", "sbinfo", "else", ":", "raise", "NoConsoleScreenBufferError" ]
Save the project configuration
def save ( self ) : project_paths = OrderedDict ( ) for project , d in OrderedDict ( self ) . items ( ) : if isinstance ( d , dict ) : project_path = d [ 'root' ] fname = osp . join ( project_path , '.project' , '.project.yml' ) if not osp . exists ( osp . dirname ( fname ) ) : os . makedirs ( osp . dirname ( fname ) ) if osp . exists ( fname ) : os . rename ( fname , fname + '~' ) d = self . rel_paths ( copy . deepcopy ( d ) ) safe_dump ( d , fname , default_flow_style = False ) project_paths [ project ] = project_path else : project_paths = self . project_paths [ project ] self . project_paths = project_paths safe_dump ( project_paths , self . all_projects , default_flow_style = False )
7,854
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L691-L713
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "cols", "=", "salt", ".", "utils", ".", "args", ".", "shlex_split", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ")", ".", "strip", "(", ")", ")", "if", "repo", "not", "in", "cols", ":", "output", ".", "append", "(", "line", ")", "else", ":", "output", ".", "append", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "repostr", "+", "'\\n'", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ",", "'w'", ")", "as", "fhandle", ":", "fhandle", ".", "writelines", "(", "output", ")" ]
Save the entire configuration files
def save ( self ) : self . projects . save ( ) self . experiments . save ( ) safe_dump ( self . global_config , self . _globals_file , default_flow_style = False )
7,855
https://github.com/Chilipp/model-organization/blob/694d1219c7ed7e1b2b17153afa11bdc21169bca2/model_organization/config.py#L748-L755
[ "def", "fi_ssn", "(", "ssn", ",", "allow_temporal_ssn", "=", "True", ")", ":", "if", "not", "ssn", ":", "return", "False", "result", "=", "re", ".", "match", "(", "ssn_pattern", ",", "ssn", ")", "if", "not", "result", ":", "return", "False", "gd", "=", "result", ".", "groupdict", "(", ")", "checksum", "=", "int", "(", "gd", "[", "'date'", "]", "+", "gd", "[", "'serial'", "]", ")", "return", "(", "int", "(", "gd", "[", "'serial'", "]", ")", ">=", "2", "and", "(", "allow_temporal_ssn", "or", "int", "(", "gd", "[", "'serial'", "]", ")", "<=", "899", ")", "and", "ssn_checkmarks", "[", "checksum", "%", "len", "(", "ssn_checkmarks", ")", "]", "==", "gd", "[", "'checksum'", "]", ")" ]
Replace the current widget content with the original text . Note that the original text has styling information available whereas the new text does not .
def reverseCommit ( self ) : self . baseClass . setText ( self . oldText ) self . qteWidget . SCISetStylingEx ( 0 , 0 , self . style )
7,856
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L390-L397
[ "def", "remover", "(", "self", ",", "id_user_group", ")", ":", "if", "not", "is_valid_int_param", "(", "id_user_group", ")", ":", "raise", "InvalidParameterError", "(", "u'Invalid or inexistent user group id.'", ")", "url", "=", "'ugroup/'", "+", "str", "(", "id_user_group", ")", "+", "'/'", "code", ",", "xml", "=", "self", ".", "submit", "(", "None", ",", "'DELETE'", ",", "url", ")", "return", "self", ".", "response", "(", "code", ",", "xml", ")" ]
Try to place the cursor in line at col if possible otherwise place it at the end .
def placeCursor ( self , line , col ) : num_lines , num_col = self . qteWidget . getNumLinesAndColumns ( ) # Place the cursor at the specified position if possible. if line >= num_lines : line , col = num_lines , num_col else : text = self . qteWidget . text ( line ) if col >= len ( text ) : col = len ( text ) - 1 self . qteWidget . setCursorPosition ( line , col )
7,857
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L441-L456
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Put the document into the before state .
def reverseCommit ( self ) : # Put the document into the 'before' state. self . baseClass . setText ( self . textBefore ) self . qteWidget . SCISetStylingEx ( 0 , 0 , self . styleBefore )
7,858
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L476-L482
[ "def", "init", "(", "cls", ",", "conn_string", "=", "None", ")", ":", "if", "conn_string", ":", "_update_meta", "(", "conn_string", ")", "# We initialize the engine within the models module because models'", "# schema can depend on which data types are supported by the engine", "Meta", ".", "Session", "=", "new_sessionmaker", "(", ")", "Meta", ".", "engine", "=", "Meta", ".", "Session", ".", "kw", "[", "\"bind\"", "]", "logger", ".", "info", "(", "f\"Connecting user:{Meta.DBUSER} \"", "f\"to {Meta.DBHOST}:{Meta.DBPORT}/{Meta.DBNAME}\"", ")", "Meta", ".", "_init_db", "(", ")", "if", "not", "Meta", ".", "log_path", ":", "init_logging", "(", ")", "return", "cls" ]
Paste the clipboard data at the current cursor position .
def fromMimeData ( self , data ) : # Only insert the element if it is available in plain text. if data . hasText ( ) : self . insert ( data . text ( ) ) # Tell the underlying QsciScintilla object that the MIME data # object was indeed empty. return ( QtCore . QByteArray ( ) , False )
7,859
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L582-L600
[ "def", "update_swarm", "(", "self", ",", "version", ",", "swarm_spec", "=", "None", ",", "rotate_worker_token", "=", "False", ",", "rotate_manager_token", "=", "False", ")", ":", "url", "=", "self", ".", "_url", "(", "'/swarm/update'", ")", "response", "=", "self", ".", "_post_json", "(", "url", ",", "data", "=", "swarm_spec", ",", "params", "=", "{", "'rotateWorkerToken'", ":", "rotate_worker_token", ",", "'rotateManagerToken'", ":", "rotate_manager_token", ",", "'version'", ":", "version", "}", ")", "self", ".", "_raise_for_status", "(", "response", ")", "return", "True" ]
Undo safe wrapper for the native keyPressEvent method .
def keyPressEvent ( self , keyEvent : QtGui . QKeyEvent ) : undoObj = UndoInsert ( self , keyEvent . text ( ) ) self . qteUndoStack . push ( undoObj )
7,860
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L695-L712
[ "def", "define_Precip", "(", "self", ",", "diameter", ",", "density", ",", "molecweight", ",", "alumMPM", ")", ":", "self", ".", "PrecipDiameter", "=", "diameter", "self", ".", "PrecipDensity", "=", "density", "self", ".", "PrecipMolecWeight", "=", "molecweight", "self", ".", "PrecipAluminumMPM", "=", "alumMPM" ]
Undo safe wrapper for the native replaceSelectedText method .
def replaceSelectedText ( self , text : str ) : undoObj = UndoReplaceSelectedText ( self , text ) self . qteUndoStack . push ( undoObj )
7,861
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L735-L752
[ "def", "_create_epoch_data", "(", "self", ",", "streams", ":", "Optional", "[", "Iterable", "[", "str", "]", "]", "=", "None", ")", "->", "EpochData", ":", "if", "streams", "is", "None", ":", "streams", "=", "[", "self", ".", "_train_stream_name", "]", "+", "self", ".", "_extra_streams", "return", "OrderedDict", "(", "[", "(", "stream_name", ",", "OrderedDict", "(", ")", ")", "for", "stream_name", "in", "streams", "]", ")" ]
Undo safe wrapper for the native insert method .
def insert ( self , text : str ) : undoObj = UndoInsert ( self , text ) self . qteUndoStack . push ( undoObj )
7,862
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L755-L772
[ "def", "getAllData", "(", "self", ",", "temp", "=", "True", ",", "accel", "=", "True", ",", "gyro", "=", "True", ")", ":", "allData", "=", "{", "}", "if", "temp", ":", "allData", "[", "\"temp\"", "]", "=", "self", ".", "getTemp", "(", ")", "if", "accel", ":", "allData", "[", "\"accel\"", "]", "=", "self", ".", "getAccelData", "(", "raw", "=", "False", ")", "if", "gyro", ":", "allData", "[", "\"gyro\"", "]", "=", "self", ".", "getGyroData", "(", ")", "return", "allData" ]
Undo safe wrapper for the native insertAt method .
def insertAt ( self , text : str , line : int , col : int ) : undoObj = UndoInsertAt ( self , text , line , col ) self . qteUndoStack . push ( undoObj )
7,863
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L775-L794
[ "def", "getAllData", "(", "self", ",", "temp", "=", "True", ",", "accel", "=", "True", ",", "gyro", "=", "True", ")", ":", "allData", "=", "{", "}", "if", "temp", ":", "allData", "[", "\"temp\"", "]", "=", "self", ".", "getTemp", "(", ")", "if", "accel", ":", "allData", "[", "\"accel\"", "]", "=", "self", ".", "getAccelData", "(", "raw", "=", "False", ")", "if", "gyro", ":", "allData", "[", "\"gyro\"", "]", "=", "self", ".", "getGyroData", "(", ")", "return", "allData" ]
Undo safe wrapper for the native append method .
def append ( self , text : str ) : pos = self . getCursorPosition ( ) line , col = self . getNumLinesAndColumns ( ) undoObj = UndoInsertAt ( self , text , line , col ) self . qteUndoStack . push ( undoObj ) self . setCursorPosition ( * pos )
7,864
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L797-L817
[ "def", "EAS2TAS", "(", "ARSP", ",", "GPS", ",", "BARO", ",", "ground_temp", "=", "25", ")", ":", "tempK", "=", "ground_temp", "+", "273.15", "-", "0.0065", "*", "GPS", ".", "Alt", "return", "sqrt", "(", "1.225", "/", "(", "BARO", ".", "Press", "/", "(", "287.26", "*", "tempK", ")", ")", ")" ]
Undo safe wrapper for the native setText method .
def setText ( self , text : str ) : undoObj = UndoSetText ( self , text ) self . qteUndoStack . push ( undoObj )
7,865
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L820-L837
[ "async", "def", "issueClaims", "(", "self", ",", "allClaimRequest", ":", "Dict", "[", "ID", ",", "ClaimRequest", "]", ")", "->", "Dict", "[", "ID", ",", "Claims", "]", ":", "res", "=", "{", "}", "for", "schemaId", ",", "claimReq", "in", "allClaimRequest", ".", "items", "(", ")", ":", "res", "[", "schemaId", "]", "=", "await", "self", ".", "issueClaim", "(", "schemaId", ",", "claimReq", ")", "return", "res" ]
Pythonic wrapper for the SCI_GETSTYLEDTEXT command .
def SCIGetStyledText ( self , selectionPos : tuple ) : # Sanity check. if not self . isSelectionPositionValid ( selectionPos ) : return None # Convert the start- and end point of the selection into # stream offsets. Ensure that start comes before end. start = self . positionFromLineIndex ( * selectionPos [ : 2 ] ) end = self . positionFromLineIndex ( * selectionPos [ 2 : ] ) if start > end : start , end = end , start # Allocate a large enough buffer. bufSize = 2 * ( end - start ) + 2 buf = bytearray ( bufSize ) # Fetch the text- and styling information. numRet = self . SendScintilla ( self . SCI_GETSTYLEDTEXT , start , end , buf ) # The last two bytes are always Zero according to the # Scintilla documentation, so remove them. buf = buf [ : - 2 ] # Double check that we did not receive more bytes than the buffer # was long. if numRet > bufSize : qteMain . qteLogger . error ( 'SCI_GETSTYLEDTEX function returned more' ' bytes than expected.' ) text = buf [ 0 : : 2 ] style = buf [ 1 : : 2 ] return ( text , style )
7,866
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L840-L895
[ "def", "_page_update", "(", "self", ",", "event", ")", ":", "try", ":", "if", "event", ".", "schema", "==", "'wikipage'", ":", "self", ".", "_update_index", "(", ")", "except", "Exception", "as", "e", ":", "self", ".", "log", "(", "\"Page creation notification error: \"", ",", "event", ",", "e", ",", "type", "(", "e", ")", ",", "lvl", "=", "error", ")" ]
Pythonic wrapper for the SCI_SETSTYLING command .
def SCISetStyling ( self , line : int , col : int , numChar : int , style : bytearray ) : if not self . isPositionValid ( line , col ) : return pos = self . positionFromLineIndex ( line , col ) self . SendScintilla ( self . SCI_STARTSTYLING , pos , 0xFF ) self . SendScintilla ( self . SCI_SETSTYLING , numChar , style )
7,867
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L898-L929
[ "def", "delete_snapshots", "(", "name", ",", "*", "names", ",", "*", "*", "kwargs", ")", ":", "deleted", "=", "dict", "(", ")", "conn", "=", "__get_conn", "(", "*", "*", "kwargs", ")", "domain", "=", "_get_domain", "(", "conn", ",", "name", ")", "for", "snap", "in", "domain", ".", "listAllSnapshots", "(", ")", ":", "if", "snap", ".", "getName", "(", ")", "in", "names", "or", "not", "names", ":", "deleted", "[", "snap", ".", "getName", "(", ")", "]", "=", "_parse_snapshot_description", "(", "snap", ")", "snap", ".", "delete", "(", ")", "conn", ".", "close", "(", ")", "available", "=", "{", "name", ":", "[", "_parse_snapshot_description", "(", "snap", ")", "for", "snap", "in", "domain", ".", "listAllSnapshots", "(", ")", "]", "or", "'N/A'", "}", "return", "{", "'available'", ":", "available", ",", "'deleted'", ":", "deleted", "}" ]
Pythonic wrapper for the SCI_SETSTYLINGEX command .
def SCISetStylingEx ( self , line : int , col : int , style : bytearray ) : if not self . isPositionValid ( line , col ) : return pos = self . positionFromLineIndex ( line , col ) self . SendScintilla ( self . SCI_STARTSTYLING , pos , 0xFF ) self . SendScintilla ( self . SCI_SETSTYLINGEX , len ( style ) , style )
7,868
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L932-L961
[ "def", "delete_snapshots", "(", "name", ",", "*", "names", ",", "*", "*", "kwargs", ")", ":", "deleted", "=", "dict", "(", ")", "conn", "=", "__get_conn", "(", "*", "*", "kwargs", ")", "domain", "=", "_get_domain", "(", "conn", ",", "name", ")", "for", "snap", "in", "domain", ".", "listAllSnapshots", "(", ")", ":", "if", "snap", ".", "getName", "(", ")", "in", "names", "or", "not", "names", ":", "deleted", "[", "snap", ".", "getName", "(", ")", "]", "=", "_parse_snapshot_description", "(", "snap", ")", "snap", ".", "delete", "(", ")", "conn", ".", "close", "(", ")", "available", "=", "{", "name", ":", "[", "_parse_snapshot_description", "(", "snap", ")", "for", "snap", "in", "domain", ".", "listAllSnapshots", "(", ")", "]", "or", "'N/A'", "}", "return", "{", "'available'", ":", "available", ",", "'deleted'", ":", "deleted", "}" ]
Specify the lexer to use .
def qteSetLexer ( self , lexer ) : if ( lexer is not None ) and ( not issubclass ( lexer , Qsci . QsciLexer ) ) : QtmacsOtherError ( 'lexer must be a class object and derived from' ' <b>QsciLexer</b>' ) return # Install and backup the lexer class. self . qteLastLexer = lexer if lexer is None : self . setLexer ( None ) else : self . setLexer ( lexer ( ) ) # Make all fonts in the style mono space. self . setMonospace ( )
7,869
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L963-L998
[ "def", "detach_all_classes", "(", "self", ")", ":", "classes", "=", "list", "(", "self", ".", "_observers", ".", "keys", "(", ")", ")", "for", "cls", "in", "classes", ":", "self", ".", "detach_class", "(", "cls", ")" ]
Fix the fonts of the first 32 styles to a mono space one .
def setMonospace ( self ) : font = bytes ( 'courier new' , 'utf-8' ) for ii in range ( 32 ) : self . SendScintilla ( self . SCI_STYLESETFONT , ii , font )
7,870
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L1022-L1040
[ "def", "on_end_validation", "(", "self", ",", "event", ")", ":", "self", ".", "Enable", "(", ")", "self", ".", "Show", "(", ")", "self", ".", "magic_gui_frame", ".", "Destroy", "(", ")" ]
Set the modified state to isModified .
def setModified ( self , isModified : bool ) : if not isModified : self . qteUndoStack . saveState ( ) super ( ) . setModified ( isModified )
7,871
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/extensions/qtmacsscintilla_widget.py#L1043-L1066
[ "def", "retrieve_secret_id", "(", "url", ",", "token", ")", ":", "import", "hvac", "client", "=", "hvac", ".", "Client", "(", "url", "=", "url", ",", "token", "=", "token", ")", "response", "=", "client", ".", "_post", "(", "'/v1/sys/wrapping/unwrap'", ")", "if", "response", ".", "status_code", "==", "200", ":", "data", "=", "response", ".", "json", "(", ")", "return", "data", "[", "'data'", "]", "[", "'secret_id'", "]" ]
Decorator used to declare that property is a tag attribute
def attribute ( func ) : def inner ( self ) : name , attribute_type = func ( self ) if not name : name = func . __name__ try : return attribute_type ( self . et . attrib [ name ] ) except KeyError : raise AttributeError return inner
7,872
https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/core.py#L14-L27
[ "def", "compress_pdf", "(", "pdf_fpath", ",", "output_fname", "=", "None", ")", ":", "import", "utool", "as", "ut", "ut", ".", "assertpath", "(", "pdf_fpath", ")", "suffix", "=", "'_'", "+", "ut", ".", "get_datestamp", "(", "False", ")", "+", "'_compressed'", "print", "(", "'pdf_fpath = %r'", "%", "(", "pdf_fpath", ",", ")", ")", "output_pdf_fpath", "=", "ut", ".", "augpath", "(", "pdf_fpath", ",", "suffix", ",", "newfname", "=", "output_fname", ")", "print", "(", "'output_pdf_fpath = %r'", "%", "(", "output_pdf_fpath", ",", ")", ")", "gs_exe", "=", "find_ghostscript_exe", "(", ")", "cmd_list", "=", "(", "gs_exe", ",", "'-sDEVICE=pdfwrite'", ",", "'-dCompatibilityLevel=1.4'", ",", "'-dNOPAUSE'", ",", "'-dQUIET'", ",", "'-dBATCH'", ",", "'-sOutputFile='", "+", "output_pdf_fpath", ",", "pdf_fpath", ")", "ut", ".", "cmd", "(", "*", "cmd_list", ")", "return", "output_pdf_fpath" ]
Decorator used to declare that the property is xml section
def section ( func ) : def inner ( self ) : return func ( self ) ( self . et . find ( func . __name__ ) ) return inner
7,873
https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/core.py#L40-L46
[ "def", "list_supported_drivers", "(", ")", ":", "def", "convert_oslo_config", "(", "oslo_options", ")", ":", "options", "=", "[", "]", "for", "opt", "in", "oslo_options", ":", "tmp_dict", "=", "{", "k", ":", "str", "(", "v", ")", "for", "k", ",", "v", "in", "vars", "(", "opt", ")", ".", "items", "(", ")", "if", "not", "k", ".", "startswith", "(", "'_'", ")", "}", "options", ".", "append", "(", "tmp_dict", ")", "return", "options", "def", "list_drivers", "(", "queue", ")", ":", "cwd", "=", "os", ".", "getcwd", "(", ")", "# Go to the parent directory directory where Cinder is installed", "os", ".", "chdir", "(", "utils", ".", "__file__", ".", "rsplit", "(", "os", ".", "sep", ",", "2", ")", "[", "0", "]", ")", "try", ":", "drivers", "=", "cinder_interface_util", ".", "get_volume_drivers", "(", ")", "mapping", "=", "{", "d", ".", "class_name", ":", "vars", "(", "d", ")", "for", "d", "in", "drivers", "}", "# Drivers contain class instances which are not serializable", "for", "driver", "in", "mapping", ".", "values", "(", ")", ":", "driver", ".", "pop", "(", "'cls'", ",", "None", ")", "if", "'driver_options'", "in", "driver", ":", "driver", "[", "'driver_options'", "]", "=", "convert_oslo_config", "(", "driver", "[", "'driver_options'", "]", ")", "finally", ":", "os", ".", "chdir", "(", "cwd", ")", "queue", ".", "put", "(", "mapping", ")", "# Use a different process to avoid having all driver classes loaded in", "# memory during our execution.", "queue", "=", "multiprocessing", ".", "Queue", "(", ")", "p", "=", "multiprocessing", ".", "Process", "(", "target", "=", "list_drivers", ",", "args", "=", "(", "queue", ",", ")", ")", "p", ".", "start", "(", ")", "result", "=", "queue", ".", "get", "(", ")", "p", ".", "join", "(", ")", "return", "result" ]
Decorator used to declare that the property is attribute of embedded tag
def tag_value ( func ) : def inner ( self ) : tag , attrib , attrib_type = func ( self ) tag_obj = self . et . find ( tag ) if tag_obj is not None : try : return attrib_type ( self . et . find ( tag ) . attrib [ attrib ] ) except KeyError : raise AttributeError return inner
7,874
https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/core.py#L49-L63
[ "def", "url_to_resource", "(", "url", ",", "request", "=", "None", ")", ":", "if", "request", "is", "None", ":", "request", "=", "get_current_request", "(", ")", "# cnv = request.registry.getAdapter(request, IResourceUrlConverter)", "reg", "=", "get_current_registry", "(", ")", "cnv", "=", "reg", ".", "getAdapter", "(", "request", ",", "IResourceUrlConverter", ")", "return", "cnv", ".", "url_to_resource", "(", "url", ")" ]
Decorator used to declare that the setter function is an attribute of embedded tag
def tag_value_setter ( tag , attrib ) : def outer ( func ) : def inner ( self , value ) : tag_elem = self . et . find ( tag ) if tag_elem is None : et = ElementTree . fromstring ( "<{}></{}>" . format ( tag , tag ) ) self . et . append ( et ) tag_elem = self . et . find ( tag ) tag_elem . attrib [ attrib ] = str ( value ) return inner return outer
7,875
https://github.com/vecnet/vecnet.openmalaria/blob/795bc9d1b81a6c664f14879edda7a7c41188e95a/vecnet/openmalaria/scenario/core.py#L66-L81
[ "def", "echo_headers", "(", "headers", ",", "file", "=", "None", ")", ":", "for", "k", ",", "v", "in", "sorted", "(", "headers", ".", "items", "(", ")", ")", ":", "click", ".", "echo", "(", "\"{0}: {1}\"", ".", "format", "(", "k", ".", "title", "(", ")", ",", "v", ")", ",", "file", "=", "file", ")", "click", ".", "echo", "(", "file", "=", "file", ")" ]
Run validation Wraps concrete implementation to ensure custom validators return proper type of result .
def run ( self , value , model = None , context = None ) : res = self . validate ( value , model , context ) if not isinstance ( res , Error ) : err = 'Validator "{}" result must be of type "{}", got "{}"' raise InvalidErrorType ( err . format ( self . __class__ . __name__ , Error , type ( res ) ) ) return res
7,876
https://github.com/projectshift/shift-schema/blob/07787b540d3369bb37217ffbfbe629118edaf0eb/shiftschema/validators/abstract_validator.py#L29-L49
[ "def", "write_two_phases", "(", "filename", ",", "data", ",", "io", ")", ":", "write_flag", "=", "True", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'r'", ")", "as", "file", ":", "old_data", "=", "file", ".", "read", "(", ")", "if", "data", "==", "old_data", ":", "write_flag", "=", "False", "if", "write_flag", ":", "tmp_filename", "=", "filename", "+", "'.tmp'", "with", "open", "(", "tmp_filename", ",", "'w+'", ")", "as", "file", ":", "file", ".", "write", "(", "data", ")", "os", ".", "replace", "(", "tmp_filename", ",", "filename", ")", "io", ".", "text", "(", "'Wrote: <fso>{0}</fso>'", ".", "format", "(", "filename", ")", ")", "else", ":", "io", ".", "text", "(", "'File <fso>{0}</fso> is up to date'", ".", "format", "(", "filename", ")", ")" ]
Returns a list of all the data gathered from the engine iterable .
def _collect_data ( self ) : all_data = [ ] for line in self . engine . run_engine ( ) : logging . debug ( "Adding {} to all_data" . format ( line ) ) all_data . append ( line . copy ( ) ) logging . debug ( "all_data is now {}" . format ( all_data ) ) return all_data
7,877
https://github.com/alextricity25/dwell_in_you_richly/blob/e705e1bc4fc0b8d2aa25680dfc432762b361c783/diyr/sinks/base.py#L23-L34
[ "def", "vnormg", "(", "v", ",", "ndim", ")", ":", "v", "=", "stypes", ".", "toDoubleVector", "(", "v", ")", "ndim", "=", "ctypes", ".", "c_int", "(", "ndim", ")", "return", "libspice", ".", "vnormg_c", "(", "v", ",", "ndim", ")" ]
Get module name from module file name .
def _get_module_name_from_fname ( fname ) : fname = fname . replace ( ".pyc" , ".py" ) for mobj in sys . modules . values ( ) : if ( hasattr ( mobj , "__file__" ) and mobj . __file__ and ( mobj . __file__ . replace ( ".pyc" , ".py" ) == fname ) ) : module_name = mobj . __name__ return module_name raise RuntimeError ( "Module could not be found" )
7,878
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L40-L51
[ "def", "merge", "(", "self", ",", "keys", ")", ":", "deletes", "=", "[", "]", "for", "pseudo_key", ",", "rows", "in", "self", ".", "_rows", ".", "items", "(", ")", ":", "self", ".", "_additional_rows_date2int", "(", "keys", ",", "rows", ")", "rows", "=", "self", ".", "_intersection", "(", "keys", ",", "rows", ")", "if", "rows", ":", "rows", "=", "self", ".", "_rows_sort", "(", "rows", ")", "self", ".", "_rows", "[", "pseudo_key", "]", "=", "self", ".", "_merge_adjacent_rows", "(", "rows", ")", "else", ":", "deletes", ".", "append", "(", "pseudo_key", ")", "for", "pseudo_key", "in", "deletes", ":", "del", "self", ".", "_rows", "[", "pseudo_key", "]" ]
Return tuple of the function argument names in the order of the function signature .
def get_function_args ( func , no_self = False , no_varargs = False ) : par_dict = signature ( func ) . parameters # Mark positional and/or keyword arguments (if any) pos = lambda x : x . kind == Parameter . VAR_POSITIONAL kw = lambda x : x . kind == Parameter . VAR_KEYWORD opts = [ "" , "*" , "**" ] args = [ "{prefix}{arg}" . format ( prefix = opts [ pos ( value ) + 2 * kw ( value ) ] , arg = par ) for par , value in par_dict . items ( ) ] # Filter out 'self' from parameter list (optional) self_filtered_args = ( args if not args else ( args [ 1 if ( args [ 0 ] == "self" ) and no_self else 0 : ] ) ) # Filter out positional or keyword arguments (optional) pos = lambda x : ( len ( x ) > 1 ) and ( x [ 0 ] == "*" ) and ( x [ 1 ] != "*" ) kw = lambda x : ( len ( x ) > 2 ) and ( x [ : 2 ] == "**" ) varargs_filtered_args = [ arg for arg in self_filtered_args if ( not no_varargs ) or all ( [ no_varargs , not pos ( arg ) , not kw ( arg ) ] ) ] return tuple ( varargs_filtered_args )
7,879
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L66-L126
[ "async", "def", "set_max_ch_setpoint", "(", "self", ",", "temperature", ",", "timeout", "=", "OTGW_DEFAULT_TIMEOUT", ")", ":", "cmd", "=", "OTGW_CMD_SET_MAX", "status", "=", "{", "}", "ret", "=", "await", "self", ".", "_wait_for_cmd", "(", "cmd", ",", "temperature", ",", "timeout", ")", "if", "ret", "is", "None", ":", "return", "ret", "=", "float", "(", "ret", ")", "status", "[", "DATA_MAX_CH_SETPOINT", "]", "=", "ret", "self", ".", "_update_status", "(", "status", ")", "return", "ret" ]
r Retrieve the module name from a module object .
def get_module_name ( module_obj ) : if not is_object_module ( module_obj ) : raise RuntimeError ( "Argument `module_obj` is not valid" ) name = module_obj . __name__ msg = "Module object `{name}` could not be found in loaded modules" if name not in sys . modules : raise RuntimeError ( msg . format ( name = name ) ) return name
7,880
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L129-L156
[ "def", "hacking_no_cr", "(", "physical_line", ")", ":", "pos", "=", "physical_line", ".", "find", "(", "'\\r'", ")", "if", "pos", "!=", "-", "1", "and", "pos", "==", "(", "len", "(", "physical_line", ")", "-", "2", ")", ":", "return", "(", "pos", ",", "\"H903: Windows style line endings not allowed in code\"", ")" ]
Yield private properties of an object .
def private_props ( obj ) : # Get private properties but NOT magic methods props = [ item for item in dir ( obj ) ] priv_props = [ _PRIVATE_PROP_REGEXP . match ( item ) for item in props ] call_props = [ callable ( getattr ( obj , item ) ) for item in props ] iobj = zip ( props , priv_props , call_props ) for obj_name in [ prop for prop , priv , call in iobj if priv and ( not call ) ] : yield obj_name
7,881
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L183-L201
[ "def", "swo_set_emu_buffer_size", "(", "self", ",", "buf_size", ")", ":", "buf", "=", "ctypes", ".", "c_uint32", "(", "buf_size", ")", "res", "=", "self", ".", "_dll", ".", "JLINKARM_SWO_Control", "(", "enums", ".", "JLinkSWOCommands", ".", "SET_BUFFERSIZE_EMU", ",", "ctypes", ".", "byref", "(", "buf", ")", ")", "if", "res", "<", "0", ":", "raise", "errors", ".", "JLinkException", "(", "res", ")", "return", "None" ]
Check that intersection of two objects has the same information .
def _check_intersection ( self , other ) : # pylint: disable=C0123 props = [ "_callables_db" , "_reverse_callables_db" , "_modules_dict" ] for prop in props : self_dict = getattr ( self , prop ) other_dict = getattr ( other , prop ) keys_self = set ( self_dict . keys ( ) ) keys_other = set ( other_dict . keys ( ) ) for key in keys_self & keys_other : svalue = self_dict [ key ] ovalue = other_dict [ key ] same_type = type ( svalue ) == type ( ovalue ) if same_type : list_comp = isinstance ( svalue , list ) and any ( [ item not in svalue for item in ovalue ] ) str_comp = isinstance ( svalue , str ) and svalue != ovalue dict_comp = isinstance ( svalue , dict ) and svalue != ovalue comp = any ( [ list_comp , str_comp , dict_comp ] ) if ( not same_type ) or ( same_type and comp ) : emsg = "Conflicting information between objects" raise RuntimeError ( emsg )
7,882
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L484-L506
[ "def", "_build_processor", "(", "cls", ",", "session", ":", "AppSession", ")", ":", "web_processor", "=", "cls", ".", "_build_web_processor", "(", "session", ")", "ftp_processor", "=", "cls", ".", "_build_ftp_processor", "(", "session", ")", "delegate_processor", "=", "session", ".", "factory", ".", "new", "(", "'Processor'", ")", "delegate_processor", ".", "register", "(", "'http'", ",", "web_processor", ")", "delegate_processor", ".", "register", "(", "'https'", ",", "web_processor", ")", "delegate_processor", ".", "register", "(", "'ftp'", ",", "ftp_processor", ")" ]
Get the callable that the line number belongs to .
def get_callable_from_line ( self , module_file , lineno ) : module_name = _get_module_name_from_fname ( module_file ) if module_name not in self . _modules_dict : self . trace ( [ module_file ] ) ret = None # Sort callables by starting line number iobj = sorted ( self . _modules_dict [ module_name ] , key = lambda x : x [ "code_id" ] [ 1 ] ) for value in iobj : if value [ "code_id" ] [ 1 ] <= lineno <= value [ "last_lineno" ] : ret = value [ "name" ] elif value [ "code_id" ] [ 1 ] > lineno : break return ret if ret else module_name
7,883
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L512-L525
[ "def", "_reconstruct_data", "(", "values", ",", "dtype", ",", "original", ")", ":", "from", "pandas", "import", "Index", "if", "is_extension_array_dtype", "(", "dtype", ")", ":", "values", "=", "dtype", ".", "construct_array_type", "(", ")", ".", "_from_sequence", "(", "values", ")", "elif", "is_datetime64tz_dtype", "(", "dtype", ")", "or", "is_period_dtype", "(", "dtype", ")", ":", "values", "=", "Index", "(", "original", ")", ".", "_shallow_copy", "(", "values", ",", "name", "=", "None", ")", "elif", "is_bool_dtype", "(", "dtype", ")", ":", "values", "=", "values", ".", "astype", "(", "dtype", ")", "# we only support object dtypes bool Index", "if", "isinstance", "(", "original", ",", "Index", ")", ":", "values", "=", "values", ".", "astype", "(", "object", ")", "elif", "dtype", "is", "not", "None", ":", "values", "=", "values", ".", "astype", "(", "dtype", ")", "return", "values" ]
Re - traces modules modified since the time they were traced .
def refresh ( self ) : self . trace ( list ( self . _fnames . keys ( ) ) , _refresh = True )
7,884
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L578-L580
[ "def", "get_symbols_with_positive_balances", "(", "self", ")", "->", "List", "[", "str", "]", ":", "from", "gnucash_portfolio", "import", "BookAggregate", "holdings", "=", "[", "]", "with", "BookAggregate", "(", ")", "as", "book", ":", "# query = book.securities.query.filter(Commodity.)", "holding_entities", "=", "book", ".", "securities", ".", "get_all", "(", ")", "for", "item", "in", "holding_entities", ":", "# Check holding balance", "agg", "=", "book", ".", "securities", ".", "get_aggregate", "(", "item", ")", "balance", "=", "agg", ".", "get_num_shares", "(", ")", "if", "balance", ">", "Decimal", "(", "0", ")", ":", "holdings", ".", "append", "(", "f\"{item.namespace}:{item.mnemonic}\"", ")", "else", ":", "self", ".", "logger", ".", "debug", "(", "f\"0 balance for {item}\"", ")", "# holdings = map(lambda x: , holding_entities)", "return", "holdings" ]
r Save traced modules information to a JSON _ file .
def save ( self , callables_fname ) : # Validate file name _validate_fname ( callables_fname ) # JSON keys have to be strings but the reverse callables dictionary # keys are tuples, where the first item is a file name and the # second item is the starting line of the callable within that file # (dictionary value), thus need to convert the key to a string items = self . _reverse_callables_db . items ( ) fdict = { "_callables_db" : self . _callables_db , "_reverse_callables_db" : dict ( [ ( str ( k ) , v ) for k , v in items ] ) , "_modules_dict" : self . _modules_dict , "_fnames" : self . _fnames , "_module_names" : self . _module_names , "_class_names" : self . _class_names , } with open ( callables_fname , "w" ) as fobj : json . dump ( fdict , fobj )
7,885
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L582-L609
[ "def", "update_count", "(", "self", ")", ":", "node_rating_count", "=", "self", ".", "node", ".", "rating_count", "node_rating_count", ".", "rating_count", "=", "self", ".", "node", ".", "rating_set", ".", "count", "(", ")", "node_rating_count", ".", "rating_avg", "=", "self", ".", "node", ".", "rating_set", ".", "aggregate", "(", "rate", "=", "Avg", "(", "'value'", ")", ")", "[", "'rate'", "]", "# if all ratings are deleted the value will be None!", "if", "node_rating_count", ".", "rating_avg", "is", "None", ":", "# set to 0 otherwise we'll get an exception", "node_rating_count", ".", "rating_avg", "=", "0", "node_rating_count", ".", "save", "(", ")" ]
Record last line number of callable .
def _close_callable ( self , node , force = False ) : # Only nodes that have a line number can be considered for closing # callables. Similarly, only nodes with lines greater than the one # already processed can be considered for closing callables try : lineno = node . lineno except AttributeError : return if lineno <= self . _processed_line : return # [[[cog # code = """ # print(pcolor('Close callable @ line = {0}'.format(lineno), 'green')) # """ # cog.out(code) # ]]] # [[[end]]] # Extract node name for property closing. Once a property is found, # it can only be closed out by a node type that has a name name = "" try : name = ( node . name if hasattr ( node , "name" ) else ( node . targets [ 0 ] . id if hasattr ( node . targets [ 0 ] , "id" ) else node . targets [ 0 ] . value . id ) ) except AttributeError : pass # Traverse backwards through call stack and close callables as needed indent = self . _get_indent ( node ) count = - 1 # [[[cog # code = """ # print( # pcolor( # ' Name {0} @ {1}, indent = {2}'.format( # name if name else 'None', lineno, indent # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] dlist = [ ] while count >= - len ( self . _indent_stack ) : element_full_name = self . _indent_stack [ count ] [ "full_name" ] edict = self . _callables_db . get ( element_full_name , None ) stack_indent = self . 
_indent_stack [ count ] [ "level" ] open_callable = element_full_name and ( not edict [ "last_lineno" ] ) # [[[cog # code = """ # print( # pcolor( # ' Name {0}, indent, {1}, stack_indent {2}'.format( # element_full_name, indent, stack_indent # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] if open_callable and ( force or ( indent < stack_indent ) or ( ( indent == stack_indent ) and ( ( edict [ "type" ] != "prop" ) or ( ( edict [ "type" ] == "prop" ) and ( name and ( name != element_full_name ) ) ) ) ) ) : # [[[cog # code = """ # print( # pcolor( # ' Closing {0} @ {1}'.format( # element_full_name, lineno-1 # ), # 'yellow' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] edict [ "last_lineno" ] = lineno - 1 dlist . append ( count ) if indent > stack_indent : break count -= 1 # Callables have to be removed from stack when they are closed, # otherwise if a callable is subsequently followed after a few # lines by another callable at a further indentation level (like a for # loop) the second callable would incorrectly appear within the scope # of the first callable stack = self . _indent_stack stack_length = len ( self . _indent_stack ) dlist = [ item for item in dlist if stack [ item ] [ "type" ] != "module" ] for item in dlist : del self . _indent_stack [ stack_length + item ]
7,886
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L791-L903
[ "def", "create", "(", "cls", ",", "destination", ")", ":", "mdb_gz_b64", "=", "\"\"\"\\\n H4sICIenn1gC/25ldzIwMDMubWRiAO2de2wcRx3Hf7O7Pt/d3u6eLyEtVaOaqg+EkjQvuVVDwa9a\n jWXHdZxQQlCJ7fOrfp3OTpqkhVxTItFWIhVQVFBRVNIKRaColVpAUKGKRwwFqUAhKiBIpUaoVWP+\n qKgIIHL8Znb39u72znWJiWP3+9l473fzm/nNY3cdf2fmbBJEPdO9E+nebLq+fWC6vrWZOImen9D7\n 9sR+vPPNE0PZxo/TE5879mj+yNc3/OzAD2bXv3DmV9/o/8PZnxxr+/fDL2w79ulzN7e+/sS/zvzz\n w3+N1z28p3PTfQ3nfn/m2YmeFS2no89uWnvqwO5HUvd/5Phr938tes3j/zm5+qT41J8/P/iZx87/\n +qHrjgyduubG1t/+7eWB2XztTNuT+1clZt9c2/e7HRGizevWEwAAAAAAAACAhUEIwvE+PoRIO8K7\n FzT6obPPwTMBAAAAAAAAAABcfpzPXwya+Ispo1xlEO2KEEX9eaGyWnrqyKQ60tQ0AcNZRcR1RYuy\n +XZCxoqRzmaMI6cKGRJuJVrIEZUOQ9UrHStUYpyzKkdNmSPFDkM6aguhXMdVHCMuHXE2Suu4IFQJ\n l6CErNWUDouDlbdKOZIcrKLD4S5WdNhqIEodqlVaofKgVTHpiBQ6uLG0uaKsuYbf3IS8BmV1qFAm\n j1Z5Hbp06GWDKC+DTS00SRN8DFA/TXNfW6mXX3upj7+mOHWllzLAObN8du0gdSdlKO3ZcWqjMbaH\n uOQqtidViRF+P0HbOH2c3xm0lfMb1EH7uHZ5vp32c+ks+5PqfSeXS9NejjTAvZQpd7J3kuuJFqLE\n qYvuVa3Ocqk7OVXWNMFxZPRVtJ1zSXuCBrlkh+rjEF1Zlt5Dw6qN0xx5Bx3gGgbowVo56EIjkc9T\n xX9Jdd+5PKDOD6q3VQvwv7qiZ8st419cdYHlo6iuriF8X4HA590AsodXhvrsj0yMDPnAuI+ZvOrq\n 1o7K51Hdy7a8cdXNm5AedbfG5W3j3lOybxFZKb6zAgAAAAAAsNzQxAlbvnYJV3VcUU3/S2luBIKF\n ha+IlWp+wxW4IiRXRSXxKeNU1eOxUuUbSOIINbEM7WT506ZE3LASgCOeYJWCMcnCsI/u8eSsFEYR\n lnlbWa6+u0jTYqSkvuQL9G5CLFwTRBMAAAAAAAAAgMtW/79lyVdLKxW7oqDF3bXOniib0UD/m/xq\n loWqvFwt3DX/mrLNALIu3V35NkpK1JDmL+2XOmr9pf1gKiFY4I672wc0mveaf6zaenyKmljPT6t5\n hT7a6y13y0XqjFpwneJjRC0oRwvL3eUL2fHCcuyGIntjhTkDuZCd5Vc5j+HNUMyx+myYcpHW5YG5\n ZijUdbg2VFu4ZzzcHFM3seQLAAAAAAAAAMtc//9S6cm1emX97ytK1v81rHelhtfVfAFnseZXRdV9\n Ad7+dhGS5kbl3eqe/K8pU/nnYwX5X2VeoLbCZwHi7txD6aTELabnoLJ5AfPFC8JmFd3Pun+MlfM4\n q/846/4s62i5+8Dmc7EvSVN0UG2tL00p1uPXqZTt/G5QqX+5lbufz+mSctVzFce6upBrTG3Fd+cn\n pmiYrUyw8+GNfL4hn8/k83qZrVlyGzgPeqbhjcOqx7KMEZRpU/MPQ+rsldEtuYm8vExkznoMS+6b\n KC5TZRt8wVf4xEkFX4V5D/X2vYz1/EcR8yMAAAAAAACAJY0Qf/d3vLPUlb//b4Nzzv6W3Wevtl+1\n vmxts2LWTxOHErcm3jGfMUfNG0yMGQAAAAAAeJ/8rLwAMXIYRgCARFv8IIaYtKpGqCdqlN/2kupD\n 
/ob67qXhsi0lDh2Vp6728faO9tHuUflfWJ1wE0e6724f35XuG71r16Dr0FwH573by6rKi0N7RveN\n tnd6aTVBWrpjd3fnuJtsBMnDk90ju7zckSA5XGGtdGrK2dWhUnRcMgAAAAAAAAD4v2CIV6vqf82I\n Jusbcwsy7wkWSf/n1JQNq/Oc+uQGq/ecmsphYZ6Tn6XwRLjwxb7mTxDoakLgURUFshwAAAAAAAAA\n ljpCrHZ8W/f2/2NUAAAAAAAAAAAAhXH5RLm4IIbotqot7hbW/0MGWCp46/+pgpHwjZS3IyAlfMPy\n tgakNN+wfcPxNgukdN9I+kadt30gZfhGjW+s8I2V3s6CVNTbWZCK+Eatb3zAN1Z5mw5SMd+I+wZ+\n +QQAAAAAAAAA/K8IcdT27Zqi3/+HkQEAAAAAAAAAsGgkMQQLjSHqbQPDAAAAAAAAAAAALGuw/g8A\n AAAAAAAA4DJUqwsQI7cQDWlcLiMq1/9rcGMBAAAAAAAAAADLGuh/AAAAAAAAAAAA+h8AAAAAAAAA\n AABLHyHusDTPjtLzTtoxnRftUftqe8YatDA+AAAAAAAAAPDeqJN/KVt+et0R9PYnzz7W8PrZRv+V\n HblO6qEDNEXbaYDGqJemaYQmaYJThtnK8Gvzb1opfDRTPZmUlxUY86qgm/ZyFVkOOqCC3kLhoyEI\n qs8raBO10O0q3EYKH+uDcNq8wnVRH93D7evnYZhHG5kkB3a0OYO2ctCWV9ZR+FhT0l2HCzl6xVBz\n XZyPUvi4taTjcwRuVUF7uYW9HMy9MJspfGwMAoo5A+5Qwca8UHN2WogeU/fu0ito1vmjM+M85zzp\n fNG5zxl2djrNzk3O9+0m+yWrx2q0fpH4buJ4Yk3ig4lvmkfxx9gBAAAAAAC4OAylQfJ5h5pfSVCc\n f853gqSmWPSZux6xjUznltH2HT/flNu7++0NZ7/07cg/vnPbVu30y6d/NLvlabPh+j81v/Xc5g9l\n 1h2f+epn9+VPdN90OHHvU50fm94y/ZXvWQ/tP/yJG/NH3llz8A79tlNPG72DHSePHdzz2s3XPzVj\n vzSUvSHjVys1Rv5CSUv8pEvcEqkbV/KX35JaQ+npikmRS9o4rtYIt8RYnJa4Ou6SV6stTm+l7rcX\n q9qSy+23pCVIcgV/SZKuJj5CSRc4Y/PpkiesLJcI53J37NvFuQzv4peGL0/SypP+C+45xVAAMAEA\n \"\"\"", "pristine", "=", "StringIO", "(", ")", "pristine", ".", "write", "(", "base64", ".", "b64decode", "(", "mdb_gz_b64", ")", ")", "pristine", ".", "seek", "(", "0", ")", "pristine", "=", "gzip", ".", "GzipFile", "(", "fileobj", "=", "pristine", ",", "mode", "=", "'rb'", ")", "with", "open", "(", "destination", ",", "'wb'", ")", "as", "handle", ":", "shutil", ".", "copyfileobj", "(", "pristine", ",", "handle", ")", "return", "cls", "(", "destination", ")" ]
Get node indentation level .
def _get_indent ( self , node ) : lineno = node . lineno if lineno > len ( self . _lines ) : return - 1 wsindent = self . _wsregexp . match ( self . _lines [ lineno - 1 ] ) return len ( wsindent . group ( 1 ) )
7,887
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L905-L911
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
Find if callable is function or method .
def _in_class ( self , node ) : # Move left one indentation level and check if that callable is a class indent = self . _get_indent ( node ) for indent_dict in reversed ( self . _indent_stack ) : # pragma: no branch if ( indent_dict [ "level" ] < indent ) or ( indent_dict [ "type" ] == "module" ) : return indent_dict [ "type" ] == "class"
7,888
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L913-L919
[ "def", "start_transmit", "(", "self", ",", "blocking", "=", "False", ",", "start_packet_groups", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "start_packet_groups", ":", "port_list_for_packet_groups", "=", "self", ".", "ports", ".", "values", "(", ")", "port_list_for_packet_groups", "=", "self", ".", "set_ports_list", "(", "*", "port_list_for_packet_groups", ")", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartTransmit {}'", ".", "format", "(", "port_list", ")", ")", "time", ".", "sleep", "(", "0.2", ")", "if", "blocking", ":", "self", ".", "wait_transmit", "(", "*", "ports", ")" ]
Get callable full name .
def _pop_indent_stack ( self , node , node_type = None , action = None ) : indent = self . _get_indent ( node ) indent_stack = copy . deepcopy ( self . _indent_stack ) # Find enclosing scope while ( len ( indent_stack ) > 1 ) and ( ( ( indent <= indent_stack [ - 1 ] [ "level" ] ) and ( indent_stack [ - 1 ] [ "type" ] != "module" ) ) or ( indent_stack [ - 1 ] [ "type" ] == "prop" ) ) : self . _close_callable ( node ) indent_stack . pop ( ) # Construct new callable name name = ( ( node . targets [ 0 ] . id if hasattr ( node . targets [ 0 ] , "id" ) else node . targets [ 0 ] . value . id ) if node_type == "prop" else node . name ) element_full_name = "." . join ( [ self . _module ] + [ indent_dict [ "prefix" ] for indent_dict in indent_stack if indent_dict [ "type" ] != "module" ] + [ name ] ) + ( "({0})" . format ( action ) if action else "" ) # Add new callable entry to indentation stack self . _indent_stack = indent_stack self . _indent_stack . append ( { "level" : indent , "prefix" : name , "type" : node_type , "full_name" : element_full_name , "lineno" : node . lineno , } ) return element_full_name
7,889
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L921-L965
[ "def", "mysql_aes_decrypt", "(", "encrypted_val", ",", "key", ")", ":", "assert", "isinstance", "(", "encrypted_val", ",", "binary_type", ")", "or", "isinstance", "(", "encrypted_val", ",", "text_type", ")", "assert", "isinstance", "(", "key", ",", "binary_type", ")", "or", "isinstance", "(", "key", ",", "text_type", ")", "k", "=", "_mysql_aes_key", "(", "_to_binary", "(", "key", ")", ")", "d", "=", "_mysql_aes_engine", "(", "_to_binary", "(", "k", ")", ")", ".", "decryptor", "(", ")", "return", "_mysql_aes_unpad", "(", "d", ".", "update", "(", "_to_binary", "(", "encrypted_val", ")", ")", "+", "d", ".", "finalize", "(", ")", ")" ]
Implement generic node .
def generic_visit ( self , node ) : # [[[cog # cog.out("print(pcolor('Enter generic visitor', 'magenta'))") # ]]] # [[[end]]] # A generic visitor that potentially closes callables is needed to # close enclosed callables that are not at the end of the enclosing # callable, otherwise the ending line of the enclosed callable would # be the ending line of the enclosing callable, which would be # incorrect self . _close_callable ( node ) super ( _AstTreeScanner , self ) . generic_visit ( node )
7,890
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L967-L979
[ "def", "compose", "(", "list_of_files", ",", "destination_file", ",", "files_metadata", "=", "None", ",", "content_type", "=", "None", ",", "retry_params", "=", "None", ",", "_account_id", "=", "None", ")", ":", "api", "=", "storage_api", ".", "_get_storage_api", "(", "retry_params", "=", "retry_params", ",", "account_id", "=", "_account_id", ")", "if", "os", ".", "getenv", "(", "'SERVER_SOFTWARE'", ")", ".", "startswith", "(", "'Dev'", ")", ":", "def", "_temp_func", "(", "file_list", ",", "destination_file", ",", "content_type", ")", ":", "bucket", "=", "'/'", "+", "destination_file", ".", "split", "(", "'/'", ")", "[", "1", "]", "+", "'/'", "with", "open", "(", "destination_file", ",", "'w'", ",", "content_type", "=", "content_type", ")", "as", "gcs_merge", ":", "for", "source_file", "in", "file_list", ":", "with", "open", "(", "bucket", "+", "source_file", "[", "'Name'", "]", ",", "'r'", ")", "as", "gcs_source", ":", "gcs_merge", ".", "write", "(", "gcs_source", ".", "read", "(", ")", ")", "compose_object", "=", "_temp_func", "else", ":", "compose_object", "=", "api", ".", "compose_object", "file_list", ",", "_", "=", "_validate_compose_list", "(", "destination_file", ",", "list_of_files", ",", "files_metadata", ",", "32", ")", "compose_object", "(", "file_list", ",", "destination_file", ",", "content_type", ")" ]
Implement assignment walker .
def visit_Assign ( self , node ) : # [[[cog # cog.out("print(pcolor('Enter assign visitor', 'magenta'))") # ]]] # [[[end]]] # ### # Class-level assignment may also be a class attribute that is not # a managed attribute, record it anyway, no harm in doing so as it # is not attached to a callable if self . _in_class ( node ) : element_full_name = self . _pop_indent_stack ( node , "prop" ) code_id = ( self . _fname , node . lineno ) self . _processed_line = node . lineno self . _callables_db [ element_full_name ] = { "name" : element_full_name , "type" : "prop" , "code_id" : code_id , "last_lineno" : None , } self . _reverse_callables_db [ code_id ] = element_full_name # [[[cog # code = """ # print( # pcolor( # 'Visiting property {0} @ {1}'.format( # element_full_name, code_id[1] # ), # 'green' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] # Get property actions self . generic_visit ( node )
7,891
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L989-L1029
[ "def", "aux", "(", "self", ",", "aux", ")", ":", "if", "aux", "==", "self", ".", "_aux", ":", "return", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "if", "aux", "is", "not", "None", ":", "self", ".", "_aux", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "aux", ",", "self", ".", "_project", ")", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: aux port set to {port}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "port", "=", "aux", ")", ")" ]
Implement class walker .
def visit_ClassDef ( self , node ) : # [[[cog # cog.out("print(pcolor('Enter class visitor', 'magenta'))") # ]]] # [[[end]]] # Get class information (name, line number, etc.) element_full_name = self . _pop_indent_stack ( node , "class" ) code_id = ( self . _fname , node . lineno ) self . _processed_line = node . lineno # Add class entry to dictionaries self . _class_names . append ( element_full_name ) self . _callables_db [ element_full_name ] = { "name" : element_full_name , "type" : "class" , "code_id" : code_id , "last_lineno" : None , } self . _reverse_callables_db [ code_id ] = element_full_name # [[[cog # code = """ # print( # pcolor( # 'Visiting class {0} @ {1}, indent = {2}'.format( # element_full_name, code_id[1], self._get_indent(node) # ), # 'green' # ) # ) # """ # cog.out(code) # ]]] # [[[end]]] self . generic_visit ( node )
7,892
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/pexdoc/pinspect.py#L1031-L1064
[ "def", "aux", "(", "self", ",", "aux", ")", ":", "if", "aux", "==", "self", ".", "_aux", ":", "return", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "if", "aux", "is", "not", "None", ":", "self", ".", "_aux", "=", "self", ".", "_manager", ".", "port_manager", ".", "reserve_tcp_port", "(", "aux", ",", "self", ".", "_project", ")", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: aux port set to {port}\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ",", "port", "=", "aux", ")", ")" ]
run doit using task_creators
def run ( task_creators , args , task_selectors = [ ] ) : if args . reset_dep : sys . exit ( DoitMain ( WrapitLoader ( args , task_creators ) ) . run ( [ 'reset-dep' ] ) ) else : sys . exit ( DoitMain ( WrapitLoader ( args , task_creators ) ) . run ( task_selectors ) )
7,893
https://github.com/rbeagrie/wrapit/blob/ee01c20cca0a9e51c62fb73c894227e36d9abaaa/wrapit/api.py#L7-L15
[ "def", "validate_file", "(", "fn", ",", "options", "=", "None", ")", ":", "file_results", "=", "FileValidationResults", "(", "filepath", "=", "fn", ")", "output", ".", "info", "(", "\"Performing JSON schema validation on %s\"", "%", "fn", ")", "if", "not", "options", ":", "options", "=", "ValidationOptions", "(", "files", "=", "fn", ")", "try", ":", "with", "open", "(", "fn", ")", "as", "instance_file", ":", "file_results", ".", "object_results", "=", "validate", "(", "instance_file", ",", "options", ")", "except", "Exception", "as", "ex", ":", "if", "'Expecting value'", "in", "str", "(", "ex", ")", ":", "line_no", "=", "str", "(", "ex", ")", ".", "split", "(", ")", "[", "3", "]", "file_results", ".", "fatal", "=", "ValidationErrorResults", "(", "'Invalid JSON input on line %s'", "%", "line_no", ")", "else", ":", "file_results", ".", "fatal", "=", "ValidationErrorResults", "(", "ex", ")", "msg", "=", "(", "\"Unexpected error occurred with file '{fn}'. No further \"", "\"validation will be performed: {error}\"", ")", "output", ".", "info", "(", "msg", ".", "format", "(", "fn", "=", "fn", ",", "error", "=", "str", "(", "ex", ")", ")", ")", "file_results", ".", "is_valid", "=", "(", "all", "(", "object_result", ".", "is_valid", "for", "object_result", "in", "file_results", ".", "object_results", ")", "and", "not", "file_results", ".", "fatal", ")", "return", "file_results" ]
Get tasks from registry by its tag
def tasks_by_tag ( self , registry_tag ) : if registry_tag not in self . __registry . keys ( ) : return None tasks = self . __registry [ registry_tag ] return tasks if self . __multiple_tasks_per_tag__ is True else tasks [ 0 ]
7,894
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/registry.py#L201-L211
[ "def", "compress_pdf", "(", "pdf_fpath", ",", "output_fname", "=", "None", ")", ":", "import", "utool", "as", "ut", "ut", ".", "assertpath", "(", "pdf_fpath", ")", "suffix", "=", "'_'", "+", "ut", ".", "get_datestamp", "(", "False", ")", "+", "'_compressed'", "print", "(", "'pdf_fpath = %r'", "%", "(", "pdf_fpath", ",", ")", ")", "output_pdf_fpath", "=", "ut", ".", "augpath", "(", "pdf_fpath", ",", "suffix", ",", "newfname", "=", "output_fname", ")", "print", "(", "'output_pdf_fpath = %r'", "%", "(", "output_pdf_fpath", ",", ")", ")", "gs_exe", "=", "find_ghostscript_exe", "(", ")", "cmd_list", "=", "(", "gs_exe", ",", "'-sDEVICE=pdfwrite'", ",", "'-dCompatibilityLevel=1.4'", ",", "'-dNOPAUSE'", ",", "'-dQUIET'", ",", "'-dBATCH'", ",", "'-sOutputFile='", "+", "output_pdf_fpath", ",", "pdf_fpath", ")", "ut", ".", "cmd", "(", "*", "cmd_list", ")", "return", "output_pdf_fpath" ]
Registered task count
def count ( self ) : result = 0 for tasks in self . __registry . values ( ) : result += len ( tasks ) return result
7,895
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/registry.py#L230-L238
[ "def", "init", "(", "self", ",", "archive", ")", ":", "from", "os", "import", "makedirs", ",", "path", ",", "chdir", ",", "system", ",", "getcwd", "self", ".", "repodir", "=", "path", ".", "abspath", "(", "path", ".", "expanduser", "(", "self", ".", "repo", ".", "staging", ")", ")", "if", "(", "\"stage\"", "in", "archive", "and", "path", ".", "isdir", "(", "archive", "[", "\"stage\"", "]", ")", "and", "self", ".", "repodir", "!=", "archive", "[", "\"stage\"", "]", "and", "archive", "[", "\"stage\"", "]", "is", "not", "None", ")", ":", "#We have a previous attempt in a different staging directory to clean.", "from", "shutil", "import", "rmtree", "rmtree", "(", "archive", "[", "\"stage\"", "]", ")", "if", "not", "path", ".", "isdir", "(", "self", ".", "repodir", ")", ":", "makedirs", "(", "self", ".", "repodir", ")", "#Copy across all the static files so that we don't have to download them", "#again and chew up the bandwidth. We don't have to copy files that already", "#exist in the local repo.", "self", ".", "repo", ".", "static", ".", "copy", "(", "self", ".", "repodir", ")", "cwd", "=", "getcwd", "(", ")", "chdir", "(", "self", ".", "repodir", ")", "if", "not", "self", ".", "_is_gitted", "(", ")", ":", "#Next we need to initialize the git repo, then add all the static files", "#and folders to be tracked so that when we pull from origin master they", "#can be merged into the repo without re-downloading them.", "system", "(", "\"git init\"", ")", "if", "not", "self", ".", "testmode", ":", "system", "(", "\"git remote add origin {}.git\"", ".", "format", "(", "self", ".", "repo", ".", "repo", ".", "html_url", ")", ")", "for", "file", "in", "self", ".", "repo", ".", "static", ".", "files", ":", "#Here the 2:: removes the ./ specifying the path relative to the git", "#repository root. 
It is added by convention in the config files.", "system", "(", "\"git add {}\"", ".", "format", "(", "file", "[", "\"target\"", "]", "[", "2", ":", ":", "]", ")", ")", "for", "folder", "in", "self", ".", "repo", ".", "static", ".", "folders", ":", "system", "(", "\"git add {}\"", ".", "format", "(", "file", "[", "\"target\"", "]", "[", "2", ":", ":", "]", ")", ")", "#Now sync with the master branch so that we get everything else that isn't", "#static. Also, fetch the changes from the pull request head so that we", "#can merge them into a new branch for unit testing.", "if", "not", "self", ".", "testmode", ":", "system", "(", "\"git pull origin master\"", ")", "#Even though we have initialized the repo before, we still need to fetch the", "#pull request we are wanting to merge in.", "if", "not", "self", ".", "testmode", ":", "system", "(", "\"git fetch origin pull/{0}/head:testing_{0}\"", ".", "format", "(", "self", ".", "pull", ".", "number", ")", ")", "system", "(", "\"git checkout testing_{}\"", ".", "format", "(", "pull", ".", "number", ")", ")", "#The local repo now has the pull request's proposed changes and is ready", "#to be unit tested.", "chdir", "(", "cwd", ")" ]
Add task class to storage
def add ( cls , task_cls ) : if task_cls . __registry_tag__ is None and cls . __skip_none_registry_tag__ is True : return cls . registry_storage ( ) . add ( task_cls )
7,896
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/registry.py#L270-L280
[ "def", "get_option_parser", "(", ")", ":", "import", "optparse", "prs", "=", "optparse", ".", "OptionParser", "(", "usage", "=", "(", "\"$0 pyrpo [-h] [-v] [-q] [-s .] \"", "\"[-r <report>] [--thg]\"", ")", ")", "prs", ".", "add_option", "(", "'-s'", ",", "'--scan'", ",", "dest", "=", "'scan'", ",", "action", "=", "'append'", ",", "default", "=", "[", "]", ",", "help", "=", "'Path(s) to scan for repositories'", ")", "prs", ".", "add_option", "(", "'-r'", ",", "'--report'", ",", "dest", "=", "'reports'", ",", "action", "=", "'append'", ",", "default", "=", "[", "]", ",", "help", "=", "(", "\"\"\"origin, status, full, gitmodule, json, sh, \"\"\"", "\"\"\"str, pip, hgsub\"\"\"", ")", ")", "prs", ".", "add_option", "(", "'--thg'", ",", "dest", "=", "'thg_report'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Write a thg-reporegistry.xml file to stdout'", ")", "prs", ".", "add_option", "(", "'--template'", ",", "dest", "=", "'report_template'", ",", "action", "=", "'store'", ",", "help", "=", "'Report template'", ")", "prs", ".", "add_option", "(", "'-v'", ",", "'--verbose'", ",", "dest", "=", "'verbose'", ",", "action", "=", "'store_true'", ",", ")", "prs", ".", "add_option", "(", "'-q'", ",", "'--quiet'", ",", "dest", "=", "'quiet'", ",", "action", "=", "'store_true'", ",", ")", "return", "prs" ]
Sample function .
def my_func ( name ) : # Add exception exobj = addex ( TypeError , "Argument `name` is not valid" ) # Conditionally raise exception exobj ( not isinstance ( name , str ) ) print ( "My name is {0}" . format ( name ) )
7,897
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/docs/support/exh_example.py#L10-L16
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Add command with the specified priority
def add_prioritized ( self , command_obj , priority ) : if priority not in self . __priorities . keys ( ) : self . __priorities [ priority ] = [ ] self . __priorities [ priority ] . append ( command_obj )
7,898
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/command/command.py#L205-L215
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "cols", "=", "salt", ".", "utils", ".", "args", ".", "shlex_split", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ")", ".", "strip", "(", ")", ")", "if", "repo", "not", "in", "cols", ":", "output", ".", "append", "(", "line", ")", "else", ":", "output", ".", "append", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "repostr", "+", "'\\n'", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ",", "'w'", ")", "as", "fhandle", ":", "fhandle", ".", "writelines", "(", "output", ")" ]
Check if there are any tracked variable inside the result . And keep them for future use .
def __track_vars ( self , command_result ) : command_env = command_result . environment ( ) for var_name in self . tracked_vars ( ) : if var_name in command_env . keys ( ) : self . __vars [ var_name ] = command_env [ var_name ]
7,899
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/command/command.py#L320-L330
[ "def", "weight_color_hsl", "(", "weight", ",", "weight_range", ",", "min_lightness", "=", "0.8", ")", ":", "# type: (float, float, float) -> _HSL_COLOR", "hue", "=", "_hue", "(", "weight", ")", "saturation", "=", "1", "rel_weight", "=", "(", "abs", "(", "weight", ")", "/", "weight_range", ")", "**", "0.7", "lightness", "=", "1.0", "-", "(", "1", "-", "min_lightness", ")", "*", "rel_weight", "return", "hue", ",", "saturation", ",", "lightness" ]