query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Apply filters to the pileup elements and return a new Pileup with the filtered elements removed .
def filter ( self , filters ) : new_elements = [ e for e in self . elements if all ( function ( e ) for function in filters ) ] return Pileup ( self . locus , new_elements )
10,200
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/read_evidence/pileup.py#L72-L86
[ "async", "def", "create_lease_if_not_exists_async", "(", "self", ",", "partition_id", ")", ":", "return_lease", "=", "None", "try", ":", "return_lease", "=", "AzureBlobLease", "(", ")", "return_lease", ".", "partition_id", "=", "partition_id", "serializable_lease", "=", "return_lease", ".", "serializable", "(", ")", "json_lease", "=", "json", ".", "dumps", "(", "serializable_lease", ")", "_logger", ".", "info", "(", "\"Creating Lease %r %r %r\"", ",", "self", ".", "lease_container_name", ",", "partition_id", ",", "json", ".", "dumps", "(", "{", "k", ":", "v", "for", "k", ",", "v", "in", "serializable_lease", ".", "items", "(", ")", "if", "k", "!=", "'event_processor_context'", "}", ")", ")", "await", "self", ".", "host", ".", "loop", ".", "run_in_executor", "(", "self", ".", "executor", ",", "functools", ".", "partial", "(", "self", ".", "storage_client", ".", "create_blob_from_text", ",", "self", ".", "lease_container_name", ",", "partition_id", ",", "json_lease", ")", ")", "except", "Exception", ":", "# pylint: disable=broad-except", "try", ":", "return_lease", "=", "await", "self", ".", "get_lease_async", "(", "partition_id", ")", "except", "Exception", "as", "err", ":", "# pylint: disable=broad-except", "_logger", ".", "error", "(", "\"Failed to create lease %r\"", ",", "err", ")", "raise", "err", "return", "return_lease" ]
Runs the decorated function in a new task
def new_task ( func ) : @ wraps ( func ) async def wrapper ( self , * args , * * kwargs ) : loop = get_event_loop ( ) loop . create_task ( func ( self , * args , * * kwargs ) ) return wrapper
10,201
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/analytics/base.py#L65-L74
[ "def", "GetAttachmentIdFromMediaId", "(", "media_id", ")", ":", "altchars", "=", "'+-'", "if", "not", "six", ".", "PY2", ":", "altchars", "=", "altchars", ".", "encode", "(", "'utf-8'", ")", "# altchars for '+' and '/'. We keep '+' but replace '/' with '-'", "buffer", "=", "base64", ".", "b64decode", "(", "str", "(", "media_id", ")", ",", "altchars", ")", "resoure_id_length", "=", "20", "attachment_id", "=", "''", "if", "len", "(", "buffer", ")", ">", "resoure_id_length", ":", "# We are cutting off the storage index.", "attachment_id", "=", "base64", ".", "b64encode", "(", "buffer", "[", "0", ":", "resoure_id_length", "]", ",", "altchars", ")", "if", "not", "six", ".", "PY2", ":", "attachment_id", "=", "attachment_id", ".", "decode", "(", "'utf-8'", ")", "else", ":", "attachment_id", "=", "media_id", "return", "attachment_id" ]
Iterates over all instances of analytics provider found in configuration
async def providers ( ) : for provider in settings . ANALYTICS_PROVIDERS : cls : BaseAnalytics = import_class ( provider [ 'class' ] ) yield await cls . instance ( * provider [ 'args' ] )
10,202
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/analytics/base.py#L77-L84
[ "def", "unshare", "(", "flags", ")", ":", "res", "=", "lib", ".", "unshare", "(", "flags", ")", "if", "res", "!=", "0", ":", "_check_error", "(", "ffi", ".", "errno", ")" ]
Track the view of a page
async def page_view ( self , url : str , title : str , user_id : str , user_lang : str = '' ) -> None : raise NotImplementedError
10,203
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/analytics/base.py#L37-L46
[ "def", "convert_to_experiment_list", "(", "experiments", ")", ":", "exp_list", "=", "experiments", "# Transform list if necessary", "if", "experiments", "is", "None", ":", "exp_list", "=", "[", "]", "elif", "isinstance", "(", "experiments", ",", "Experiment", ")", ":", "exp_list", "=", "[", "experiments", "]", "elif", "type", "(", "experiments", ")", "is", "dict", ":", "exp_list", "=", "[", "Experiment", ".", "from_json", "(", "name", ",", "spec", ")", "for", "name", ",", "spec", "in", "experiments", ".", "items", "(", ")", "]", "# Validate exp_list", "if", "(", "type", "(", "exp_list", ")", "is", "list", "and", "all", "(", "isinstance", "(", "exp", ",", "Experiment", ")", "for", "exp", "in", "exp_list", ")", ")", ":", "if", "len", "(", "exp_list", ")", ">", "1", ":", "logger", ".", "warning", "(", "\"All experiments will be \"", "\"using the same SearchAlgorithm.\"", ")", "else", ":", "raise", "TuneError", "(", "\"Invalid argument: {}\"", ".", "format", "(", "experiments", ")", ")", "return", "exp_list" ]
As per the law anonymize user identifier before sending it .
def hash_user_id ( self , user_id : str ) -> str : h = sha256 ( ) h . update ( user_id . encode ( ) ) return h . hexdigest ( )
10,204
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/analytics/base.py#L48-L55
[ "def", "_match_directories", "(", "self", ",", "entries", ",", "root", ",", "regex_string", ")", ":", "self", ".", "log", "(", "u\"Matching directory names in paged hierarchy\"", ")", "self", ".", "log", "(", "[", "u\"Matching within '%s'\"", ",", "root", "]", ")", "self", ".", "log", "(", "[", "u\"Matching regex '%s'\"", ",", "regex_string", "]", ")", "regex", "=", "re", ".", "compile", "(", "r\"\"", "+", "regex_string", ")", "directories", "=", "set", "(", ")", "root_len", "=", "len", "(", "root", ")", "for", "entry", "in", "entries", ":", "# look only inside root dir", "if", "entry", ".", "startswith", "(", "root", ")", ":", "self", ".", "log", "(", "[", "u\"Examining '%s'\"", ",", "entry", "]", ")", "# remove common prefix root/", "entry", "=", "entry", "[", "root_len", "+", "1", ":", "]", "# split path", "entry_splitted", "=", "entry", ".", "split", "(", "os", ".", "sep", ")", "# match regex", "if", "(", "(", "len", "(", "entry_splitted", ")", ">=", "2", ")", "and", "(", "re", ".", "match", "(", "regex", ",", "entry_splitted", "[", "0", "]", ")", "is", "not", "None", ")", ")", ":", "directories", ".", "add", "(", "entry_splitted", "[", "0", "]", ")", "self", ".", "log", "(", "[", "u\"Match: '%s'\"", ",", "entry_splitted", "[", "0", "]", "]", ")", "else", ":", "self", ".", "log", "(", "[", "u\"No match: '%s'\"", ",", "entry", "]", ")", "return", "sorted", "(", "directories", ")" ]
Delete a workflow .
def delete ( cls , uuid ) : to_delete = Workflow . query . get ( uuid ) db . session . delete ( to_delete )
10,205
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/models.py#L96-L99
[ "def", "unload", "(", "self", ")", ":", "unloaded", "=", "False", "if", "self", ".", "_lib", "is", "not", "None", ":", "if", "self", ".", "_winlib", "is", "not", "None", ":", "# ctypes passes integers as 32-bit C integer types, which will", "# truncate the value of a 64-bit pointer in 64-bit python, so", "# we have to change the FreeLibrary method to take a pointer", "# instead of an integer handle.", "ctypes", ".", "windll", ".", "kernel32", ".", "FreeLibrary", ".", "argtypes", "=", "(", "ctypes", ".", "c_void_p", ",", ")", "# On Windows we must free both loaded libraries before the", "# temporary file can be cleaned up.", "ctypes", ".", "windll", ".", "kernel32", ".", "FreeLibrary", "(", "self", ".", "_lib", ".", "_handle", ")", "ctypes", ".", "windll", ".", "kernel32", ".", "FreeLibrary", "(", "self", ".", "_winlib", ".", "_handle", ")", "self", ".", "_lib", "=", "None", "self", ".", "_winlib", "=", "None", "unloaded", "=", "True", "else", ":", "# On OSX and Linux, just release the library; it's not safe", "# to close a dll that ctypes is using.", "del", "self", ".", "_lib", "self", ".", "_lib", "=", "None", "unloaded", "=", "True", "if", "self", ".", "_temp", "is", "not", "None", ":", "os", ".", "remove", "(", "self", ".", "_temp", ".", "name", ")", "self", ".", "_temp", "=", "None", "return", "unloaded" ]
Run a workflow by name with list of data objects .
def run_worker ( wname , data , engine_uuid_hex = None , * * kwargs ) : if 'stop_on_halt' not in kwargs : kwargs [ 'stop_on_halt' ] = False if engine_uuid_hex : engine_uuid = uuid . UUID ( hex = engine_uuid_hex ) engine = WorkflowEngine . from_uuid ( uuid = engine_uuid , * * kwargs ) else : engine = WorkflowEngine . with_name ( wname , * * kwargs ) engine . save ( ) objects = get_workflow_object_instances ( data , engine ) db . session . commit ( ) engine . process ( objects , * * kwargs ) return engine
10,206
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/worker_engine.py#L30-L62
[ "def", "breakpoint_set", "(", "self", ",", "addr", ",", "thumb", "=", "False", ",", "arm", "=", "False", ")", ":", "flags", "=", "enums", ".", "JLinkBreakpoint", ".", "ANY", "if", "thumb", ":", "flags", "=", "flags", "|", "enums", ".", "JLinkBreakpoint", ".", "THUMB", "elif", "arm", ":", "flags", "=", "flags", "|", "enums", ".", "JLinkBreakpoint", ".", "ARM", "handle", "=", "self", ".", "_dll", ".", "JLINKARM_SetBPEx", "(", "int", "(", "addr", ")", ",", "flags", ")", "if", "handle", "<=", "0", ":", "raise", "errors", ".", "JLinkException", "(", "'Breakpoint could not be set.'", ")", "return", "handle" ]
Restart workflow from beginning with given engine UUID and any data .
def restart_worker ( uuid , * * kwargs ) : if 'stop_on_halt' not in kwargs : kwargs [ 'stop_on_halt' ] = False engine = WorkflowEngine . from_uuid ( uuid = uuid , * * kwargs ) if "data" not in kwargs : objects = workflow_object_class . query ( id_workflow = uuid ) else : data = kwargs . pop ( "data" ) if not isinstance ( data , ( list , tuple ) ) : data = [ data ] objects = get_workflow_object_instances ( data , engine ) db . session . commit ( ) engine . process ( objects , * * kwargs ) return engine
10,207
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/worker_engine.py#L65-L95
[ "def", "_max_weight_operator", "(", "ops", ":", "Iterable", "[", "PauliTerm", "]", ")", "->", "Union", "[", "None", ",", "PauliTerm", "]", ":", "mapping", "=", "dict", "(", ")", "# type: Dict[int, str]", "for", "op", "in", "ops", ":", "for", "idx", ",", "op_str", "in", "op", ":", "if", "idx", "in", "mapping", ":", "if", "mapping", "[", "idx", "]", "!=", "op_str", ":", "return", "None", "else", ":", "mapping", "[", "idx", "]", "=", "op_str", "op", "=", "functools", ".", "reduce", "(", "mul", ",", "(", "PauliTerm", "(", "op", ",", "q", ")", "for", "q", ",", "op", "in", "mapping", ".", "items", "(", ")", ")", ",", "sI", "(", ")", ")", "return", "op" ]
Analyze data and create corresponding WorkflowObjects .
def get_workflow_object_instances ( data , engine ) : workflow_objects = [ ] data_type = engine . get_default_data_type ( ) for data_object in data : if isinstance ( data_object , workflow_object_class . _get_current_object ( ) ) : if not data_object . data_type : data_object . data_type = data_type if data_object . id : data_object . log . debug ( "Existing workflow object found for " "this object." ) if data_object . status == data_object . known_statuses . COMPLETED : data_object . status = data_object . known_statuses . INITIAL workflow_objects . append ( data_object ) else : # Data is not already a WorkflowObject, we then # add the running object to run through the workflow. current_obj = create_data_object_from_data ( data_object , engine , data_type ) workflow_objects . append ( current_obj ) return workflow_objects
10,208
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/worker_engine.py#L136-L184
[ "def", "status", "(", "self", ",", "remote", "=", "False", ")", ":", "if", "remote", ":", "components", "=", "urlparse", ".", "urlparse", "(", "self", ".", "endpoint", ")", "try", ":", "result", "=", "self", ".", "session", ".", "get", "(", "components", "[", "0", "]", "+", "\"://\"", "+", "components", "[", "1", "]", "+", "\"/status\"", ",", "timeout", "=", "self", ".", "timeout", ")", "except", "Exception", "as", "e", ":", "if", "self", ".", "logger", ":", "self", ".", "logger", ".", "debug", "(", "\"Failed to connect to server for status: %s\"", ",", "e", ")", "result", "=", "None", "if", "result", "and", "result", ".", "status_code", "==", "200", ":", "self", ".", "server_status", "=", "result", ".", "json", "(", ")", "self", ".", "server_status", "[", "\"endpoint\"", "]", "=", "self", ".", "endpoint", "elif", "result", ":", "if", "self", ".", "logger", ":", "self", ".", "logger", ".", "debug", "(", "\"Server status response not understandable: Status: %d, Body: %s\"", ",", "result", ".", "status_code", ",", "result", ".", "text", ")", "self", ".", "server_status", "=", "{", "\"endpoint\"", ":", "self", ".", "endpoint", ",", "\"status\"", ":", "(", "\"Unexpected HTTP status \"", "+", "str", "(", "result", ".", "status_code", ")", "+", "\" at: \"", "+", "strftime", "(", "\"%d %b %Y %H:%M:%S +0000\"", ",", "gmtime", "(", ")", ")", ")", "}", "else", ":", "self", ".", "server_status", "=", "{", "\"endpoint\"", ":", "self", ".", "endpoint", ",", "\"status\"", ":", "\"Unreachable at: \"", "+", "strftime", "(", "\"%d %b %Y %H:%M:%S +0000\"", ",", "gmtime", "(", ")", ")", "}", "return", "self", ".", "local_status", ",", "self", ".", "server_status" ]
Create a new WorkflowObject from given data and return it .
def create_data_object_from_data ( data_object , engine , data_type ) : # Data is not already a WorkflowObject, we first # create an initial object for each data object. return workflow_object_class . create ( data = data_object , id_workflow = engine . uuid , status = workflow_object_class . known_statuses . INITIAL , data_type = data_type , )
10,209
https://github.com/inveniosoftware-contrib/invenio-workflows/blob/9c09fd29509a3db975ac2aba337e6760d8cfd3c2/invenio_workflows/worker_engine.py#L187-L210
[ "def", "max_texture_limit", "(", "self", ")", ":", "max_unit_array", "=", "(", "gl", ".", "GLint", "*", "1", ")", "(", ")", "gl", ".", "glGetIntegerv", "(", "gl", ".", "GL_MAX_TEXTURE_IMAGE_UNITS", ",", "max_unit_array", ")", "return", "max_unit_array", "[", "0", "]" ]
prints the rst page of the command what
def _print_rst ( self , what ) : print print "Command - %s::" % what exec ( "h = self.do_%s.__doc__" % what ) # noinspection PyUnboundLocalVariable h = textwrap . dedent ( h ) . replace ( "::\n\n" , "" ) h = textwrap . dedent ( h ) . replace ( "\n" , "\n " ) print h
10,210
https://github.com/cloudmesh-cmd3/cmd3/blob/92e33c96032fd3921f159198a0e57917c4dc34ed/cmd3/plugins/rst.py#L11-L27
[ "def", "_mmUpdateDutyCycles", "(", "self", ")", ":", "period", "=", "self", ".", "getDutyCyclePeriod", "(", ")", "unionSDRArray", "=", "numpy", ".", "zeros", "(", "self", ".", "getNumColumns", "(", ")", ")", "unionSDRArray", "[", "list", "(", "self", ".", "_mmTraces", "[", "\"unionSDR\"", "]", ".", "data", "[", "-", "1", "]", ")", "]", "=", "1", "self", ".", "_mmData", "[", "\"unionSDRDutyCycle\"", "]", "=", "UnionTemporalPoolerMonitorMixin", ".", "_mmUpdateDutyCyclesHelper", "(", "self", ".", "_mmData", "[", "\"unionSDRDutyCycle\"", "]", ",", "unionSDRArray", ",", "period", ")", "self", ".", "_mmData", "[", "\"persistenceDutyCycle\"", "]", "=", "UnionTemporalPoolerMonitorMixin", ".", "_mmUpdateDutyCyclesHelper", "(", "self", ".", "_mmData", "[", "\"persistenceDutyCycle\"", "]", ",", "self", ".", "_poolingActivation", ",", "period", ")" ]
Allow loading of JSON rule data .
def load_json ( cls , data , default_rule = None , raise_error = False ) : rules = { k : _parser . parse_rule ( v , raise_error ) for k , v in json . loads ( data ) . items ( ) } return cls ( rules , default_rule )
10,211
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/enforcer.py#L30-L36
[ "def", "delete_entity", "(", "self", ",", "entity_id", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "api_path", "=", "'/v1/{mount_point}/entity/id/{id}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "id", "=", "entity_id", ",", ")", "return", "self", ".", "_adapter", ".", "delete", "(", "url", "=", "api_path", ",", ")" ]
Allow loading of rule data from a dictionary .
def from_dict ( cls , rules_dict : dict , default_rule = None , raise_error = False ) : # Parse the rules stored in the dictionary rules = { k : _parser . parse_rule ( v , raise_error ) for k , v in rules_dict . items ( ) } return cls ( rules , default_rule )
10,212
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/enforcer.py#L39-L46
[ "def", "_make_chunk_size", "(", "self", ",", "req_size", ")", ":", "size", "=", "req_size", "size", "+=", "2", "*", "self", ".", "_chunk_size_t_size", "# Two size fields", "size", "=", "self", ".", "_chunk_min_size", "if", "size", "<", "self", ".", "_chunk_min_size", "else", "size", "if", "size", "&", "self", ".", "_chunk_align_mask", ":", "# If the chunk would not be aligned", "size", "=", "(", "size", "&", "~", "self", ".", "_chunk_align_mask", ")", "+", "self", ".", "_chunk_align_mask", "+", "1", "# Fix it", "return", "size" ]
Created a new Rules object based on the provided dict of rules .
def _set_rules ( self , rules : dict , overwrite = True ) : if not isinstance ( rules , dict ) : raise TypeError ( 'rules must be an instance of dict or Rules,' 'got %r instead' % type ( rules ) ) if overwrite : self . rules = Rules ( rules , self . default_rule ) else : self . rules . update ( rules )
10,213
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/enforcer.py#L103-L113
[ "def", "run_experiment", "(", "self", ")", ":", "try", ":", "self", ".", "sign_up", "(", ")", "self", ".", "participate", "(", ")", "if", "self", ".", "sign_off", "(", ")", ":", "self", ".", "complete_experiment", "(", "\"worker_complete\"", ")", "else", ":", "self", ".", "complete_experiment", "(", "\"worker_failed\"", ")", "finally", ":", "self", ".", "driver", ".", "quit", "(", ")" ]
Load rules from policy file or cache .
def load_rules ( self , force_reload = False , overwrite = True ) : # double-checked locking if self . load_once and self . _policy_loaded : return with self . _load_lock : if self . load_once and self . _policy_loaded : return reloaded , data = _cache . read_file ( self . policy_file , force_reload = force_reload ) self . _policy_loaded = True if reloaded or not self . rules : rules = Rules . load_json ( data , self . default_rule , self . raise_error ) self . _set_rules ( rules , overwrite = overwrite ) LOG . debug ( 'Reload policy file: %s' , self . policy_file )
10,214
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/enforcer.py#L115-L131
[ "def", "is_valid_assignment", "(", "self", ",", "mtf_dimension_name", ",", "mesh_dimension_name", ")", ":", "return", "(", "(", "mtf_dimension_name", "in", "self", ".", "_splittable_mtf_dimension_names", ")", "and", "(", "self", ".", "_mtf_dimension_name_to_size_gcd", "[", "mtf_dimension_name", "]", "%", "self", ".", "_mesh_dimension_name_to_size", "[", "mesh_dimension_name", "]", "==", "0", ")", ")" ]
Checks authorization of a rule against the target and credentials .
def enforce ( self , rule , target , creds , exc = None , * args , * * kwargs ) : self . load_rules ( ) if isinstance ( rule , checks . BaseCheck ) : result = rule ( target , creds , self , rule ) elif not self . rules : # No rules means we're going to fail closed. result = False else : try : # Evaluate the rule result = self . rules [ rule ] ( target , creds , self , rule ) except KeyError : LOG . debug ( 'Rule [%s] does not exist' , rule ) # If the rule doesn't exist, fail closed result = False if self . raise_error and not result : if exc : raise exc ( * args , * * kwargs ) else : raise PolicyNotAuthorized ( rule , target , creds ) return result
10,215
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/enforcer.py#L133-L158
[ "def", "boxplot_frame_groupby", "(", "grouped", ",", "subplots", "=", "True", ",", "column", "=", "None", ",", "fontsize", "=", "None", ",", "rot", "=", "0", ",", "grid", "=", "True", ",", "ax", "=", "None", ",", "figsize", "=", "None", ",", "layout", "=", "None", ",", "sharex", "=", "False", ",", "sharey", "=", "True", ",", "*", "*", "kwds", ")", ":", "_raise_if_no_mpl", "(", ")", "_converter", ".", "_WARN", "=", "False", "if", "subplots", "is", "True", ":", "naxes", "=", "len", "(", "grouped", ")", "fig", ",", "axes", "=", "_subplots", "(", "naxes", "=", "naxes", ",", "squeeze", "=", "False", ",", "ax", "=", "ax", ",", "sharex", "=", "sharex", ",", "sharey", "=", "sharey", ",", "figsize", "=", "figsize", ",", "layout", "=", "layout", ")", "axes", "=", "_flatten", "(", "axes", ")", "from", "pandas", ".", "core", ".", "series", "import", "Series", "ret", "=", "Series", "(", ")", "for", "(", "key", ",", "group", ")", ",", "ax", "in", "zip", "(", "grouped", ",", "axes", ")", ":", "d", "=", "group", ".", "boxplot", "(", "ax", "=", "ax", ",", "column", "=", "column", ",", "fontsize", "=", "fontsize", ",", "rot", "=", "rot", ",", "grid", "=", "grid", ",", "*", "*", "kwds", ")", "ax", ".", "set_title", "(", "pprint_thing", "(", "key", ")", ")", "ret", ".", "loc", "[", "key", "]", "=", "d", "fig", ".", "subplots_adjust", "(", "bottom", "=", "0.15", ",", "top", "=", "0.9", ",", "left", "=", "0.1", ",", "right", "=", "0.9", ",", "wspace", "=", "0.2", ")", "else", ":", "from", "pandas", ".", "core", ".", "reshape", ".", "concat", "import", "concat", "keys", ",", "frames", "=", "zip", "(", "*", "grouped", ")", "if", "grouped", ".", "axis", "==", "0", ":", "df", "=", "concat", "(", "frames", ",", "keys", "=", "keys", ",", "axis", "=", "1", ")", "else", ":", "if", "len", "(", "frames", ")", ">", "1", ":", "df", "=", "frames", "[", "0", "]", ".", "join", "(", "frames", "[", "1", ":", ":", "]", ")", "else", ":", "df", "=", "frames", "[", "0", "]", "ret", "=", "df", ".", 
"boxplot", "(", "column", "=", "column", ",", "fontsize", "=", "fontsize", ",", "rot", "=", "rot", ",", "grid", "=", "grid", ",", "ax", "=", "ax", ",", "figsize", "=", "figsize", ",", "layout", "=", "layout", ",", "*", "*", "kwds", ")", "return", "ret" ]
Returns the flattened keys of BoundSpatialPoints in a schema
def get_flattened_bsp_keys_from_schema ( schema ) : keys = [ ] for key in schema . declared_fields . keys ( ) : field = schema . declared_fields [ key ] if isinstance ( field , mm . fields . Nested ) and isinstance ( field . schema , BoundSpatialPoint ) : keys . append ( "{}.{}" . format ( key , "position" ) ) return keys
10,216
https://github.com/seung-lab/EMAnnotationSchemas/blob/ca81eff0f449bd7eb0392e0982db8f3636446a9e/emannotationschemas/utils.py#L14-L29
[ "def", "data", "(", "self", ")", ":", "d", "=", "{", "}", "self", ".", "token", "=", "''", "try", ":", "d", "=", "self", ".", "viz", ".", "data", "self", ".", "token", "=", "d", ".", "get", "(", "'token'", ")", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "d", "[", "'error'", "]", "=", "str", "(", "e", ")", "return", "{", "'datasource'", ":", "self", ".", "datasource_name", ",", "'description'", ":", "self", ".", "description", ",", "'description_markeddown'", ":", "self", ".", "description_markeddown", ",", "'edit_url'", ":", "self", ".", "edit_url", ",", "'form_data'", ":", "self", ".", "form_data", ",", "'slice_id'", ":", "self", ".", "id", ",", "'slice_name'", ":", "self", ".", "slice_name", ",", "'slice_url'", ":", "self", ".", "slice_url", ",", "'modified'", ":", "self", ".", "modified", "(", ")", ",", "'changed_on_humanized'", ":", "self", ".", "changed_on_humanized", ",", "'changed_on'", ":", "self", ".", "changed_on", ".", "isoformat", "(", ")", ",", "}" ]
Return and generate if required the lock for this request .
def lock ( self ) -> asyncio . Lock : if self . lock_key not in self . request . custom_content : self . request . custom_content [ self . lock_key ] = asyncio . Lock ( ) return self . request . custom_content [ self . lock_key ]
10,217
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/triggers.py#L94-L102
[ "def", "SetValue", "(", "self", ",", "Channel", ",", "Parameter", ",", "Buffer", ")", ":", "try", ":", "if", "Parameter", "==", "PCAN_LOG_LOCATION", "or", "Parameter", "==", "PCAN_LOG_TEXT", "or", "Parameter", "==", "PCAN_TRACE_LOCATION", ":", "mybuffer", "=", "create_string_buffer", "(", "256", ")", "else", ":", "mybuffer", "=", "c_int", "(", "0", ")", "mybuffer", ".", "value", "=", "Buffer", "res", "=", "self", ".", "__m_dllBasic", ".", "CAN_SetValue", "(", "Channel", ",", "Parameter", ",", "byref", "(", "mybuffer", ")", ",", "sizeof", "(", "mybuffer", ")", ")", "return", "TPCANStatus", "(", "res", ")", "except", ":", "logger", ".", "error", "(", "\"Exception on PCANBasic.SetValue\"", ")", "raise" ]
Get the value from the API . Make sure to use a lock in order not to fetch the value twice at the same time .
async def get_value ( self ) : cc = self . request . custom_content async with self . lock : if self . content_key not in cc : cc [ self . content_key ] = await self . call_api ( ) return cc [ self . content_key ]
10,218
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/triggers.py#L119-L131
[ "def", "music_search", "(", "self", ",", "entitiy_type", ",", "query", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "make_request", "(", "'music'", ",", "entitiy_type", ",", "query", ",", "kwargs", ")" ]
If there is a text layer inside the request try to find a matching text in the specified intent .
async def rank ( self ) -> Optional [ float ] : if not self . request . has_layer ( l . RawText ) : return tl = self . request . get_layer ( l . RawText ) matcher = Matcher ( [ tuple ( Trigram ( y ) for y in x ) for x in await self . intent . strings ( self . request ) ] ) return matcher % Trigram ( tl . text )
10,219
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/triggers.py#L163-L178
[ "def", "truncate_schema", "(", "self", ")", ":", "assert", "self", ".", "server", "==", "'localhost'", "con", "=", "self", ".", "connection", "or", "self", ".", "_connect", "(", ")", "self", ".", "_initialize", "(", "con", ")", "cur", "=", "con", ".", "cursor", "(", ")", "cur", ".", "execute", "(", "'DELETE FROM publication;'", ")", "cur", ".", "execute", "(", "'TRUNCATE systems CASCADE;'", ")", "con", ".", "commit", "(", ")", "con", ".", "close", "(", ")", "return" ]
Look for the QuickReply layer s slug into available choices .
def _rank_qr ( self , choices ) : from bernard . platforms . facebook import layers as fbl try : qr = self . request . get_layer ( fbl . QuickReply ) self . chosen = choices [ qr . slug ] self . slug = qr . slug if self . when is None or self . when == qr . slug : return 1.0 except KeyError : pass
10,220
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/triggers.py#L200-L214
[ "def", "eof_received", "(", "self", ")", "->", "bool", ":", "logger", ".", "debug", "(", "\"%s - event = eof_received()\"", ",", "self", ".", "side", ")", "super", "(", ")", ".", "eof_received", "(", ")", "return", "False" ]
Try to match the TextLayer with choice s intents .
async def _rank_text ( self , choices ) : tl = self . request . get_layer ( l . RawText ) best = 0.0 for slug , params in choices . items ( ) : strings = [ ] if params [ 'intent' ] : intent = getattr ( intents , params [ 'intent' ] ) strings += await intent . strings ( self . request ) if params [ 'text' ] : strings . append ( ( params [ 'text' ] , ) ) matcher = Matcher ( [ tuple ( Trigram ( y ) for y in x ) for x in strings ] ) score = matcher % Trigram ( await render ( tl . text , self . request ) ) if score > best : self . chosen = params self . slug = slug best = score if self . when is None or self . slug == self . when : return best
10,221
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/triggers.py#L216-L243
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_access", "is", "not", "None", ":", "_logger", ".", "debug", "(", "\"Cleaning up\"", ")", "pci_cleanup", "(", "self", ".", "_access", ")", "self", ".", "_access", "=", "None" ]
Feature type specific check of global recommended attributes .
def check_recommended_attributes ( self , dataset ) : results = [ ] recommended_ctx = TestCtx ( BaseCheck . MEDIUM , 'Recommended global attributes' ) # Check time_coverage_duration and resolution for attr in [ 'time_coverage_duration' , 'time_coverage_resolution' ] : attr_value = getattr ( dataset , attr , '' ) try : parse_duration ( attr_value ) recommended_ctx . assert_true ( True , '' ) # Score it True! except Exception : recommended_ctx . assert_true ( False , '{} should exist and be ISO-8601 format (example: PT1M30S), currently: {}' . format ( attr , attr_value ) ) results . append ( recommended_ctx . to_result ( ) ) return results
10,222
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_timeseries.py#L154-L171
[ "def", "_embedded_frames", "(", "frame_list", ",", "frame_format", ")", ":", "template", "=", "' frames[{0}] = \"data:image/{1};base64,{2}\"\\n'", "embedded", "=", "\"\\n\"", "for", "i", ",", "frame_data", "in", "enumerate", "(", "frame_list", ")", ":", "embedded", "+=", "template", ".", "format", "(", "i", ",", "frame_format", ",", "frame_data", ".", "replace", "(", "'\\n'", ",", "'\\\\\\n'", ")", ")", "return", "embedded" ]
Checks that the feature types of this dataset are consitent with a time series incomplete dataset
def check_dimensions ( self , dataset ) : required_ctx = TestCtx ( BaseCheck . HIGH , 'All geophysical variables are time-series incomplete feature types' ) message = '{} must be a valid timeseries feature type. It must have dimensions of (timeSeries, time).' message += ' And all coordinates must have dimensions of (timeSeries)' for variable in util . get_geophysical_variables ( dataset ) : is_valid = util . is_multi_timeseries_incomplete ( dataset , variable ) required_ctx . assert_true ( is_valid , message . format ( variable ) ) return required_ctx . to_result ( )
10,223
https://github.com/ioos/cc-plugin-ncei/blob/963fefd7fa43afd32657ac4c36aad4ddb4c25acf/cc_plugin_ncei/ncei_timeseries.py#L181-L196
[ "def", "txn_removeAssociation", "(", "self", ",", "server_url", ",", "handle", ")", ":", "self", ".", "db_remove_assoc", "(", "server_url", ",", "handle", ")", "return", "self", ".", "cur", ".", "rowcount", ">", "0" ]
Read a file if it has been modified .
def read_file ( filename : str , force_reload = False ) : if force_reload : _delete_cached_file ( filename ) reloaded = False mtime = os . path . getmtime ( filename ) cache_info = CACHE . setdefault ( filename , { } ) if not cache_info or mtime > cache_info . get ( 'mtime' , 0 ) : LOG . debug ( 'Reloading cached file %s' , filename ) with open ( filename ) as fp : cache_info [ 'data' ] = fp . read ( ) cache_info [ 'mtime' ] = mtime reloaded = True return reloaded , cache_info [ 'data' ]
10,224
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/_cache.py#L19-L41
[ "def", "get_stores_secrets_volumes", "(", "cls", ",", "stores_secrets", ")", ":", "volumes", "=", "[", "]", "volume_mounts", "=", "[", "]", "for", "store_secret", "in", "stores_secrets", ":", "store", "=", "store_secret", "[", "'store'", "]", "if", "store", "in", "{", "GCS", ",", "S3", "}", ":", "secrets_volumes", ",", "secrets_volume_mounts", "=", "get_volume_from_secret", "(", "volume_name", "=", "cls", ".", "STORE_SECRET_VOLUME_NAME", ".", "format", "(", "store", ")", ",", "mount_path", "=", "cls", ".", "STORE_SECRET_KEY_MOUNT_PATH", ".", "format", "(", "store", ")", ",", "secret_name", "=", "store_secret", "[", "'persistence_secret'", "]", ",", ")", "volumes", "+=", "secrets_volumes", "volume_mounts", "+=", "secrets_volume_mounts", "return", "volumes", ",", "volume_mounts" ]
Use the validator set on a separate attribute on the class .
def use_model_attr ( attr ) : def use_model_validator ( instance , attribute , value ) : getattr ( instance , attr ) ( instance , attribute , value ) return use_model_validator
10,225
https://github.com/COALAIP/pycoalaip/blob/cecc8f6ff4733f0525fafcee63647753e832f0be/coalaip/model_validators.py#L13-L18
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Must include at least a name key .
def is_creation_model ( instance , attribute , value ) : creation_name = value . get ( 'name' ) if not isinstance ( creation_name , str ) : instance_name = instance . __class__ . __name__ err_str = ( "'name' must be given as a string in the '{attr}' " "parameter of a '{cls}'. Given " "'{value}'" ) . format ( attr = attribute . name , cls = instance_name , value = creation_name ) raise ModelDataError ( err_str )
10,226
https://github.com/COALAIP/pycoalaip/blob/cecc8f6ff4733f0525fafcee63647753e832f0be/coalaip/model_validators.py#L45-L56
[ "def", "delete_topic_groups", "(", "self", ",", "group_id", ",", "topic_id", ")", ":", "path", "=", "{", "}", "data", "=", "{", "}", "params", "=", "{", "}", "# REQUIRED - PATH - group_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"group_id\"", "]", "=", "group_id", "# REQUIRED - PATH - topic_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"topic_id\"", "]", "=", "topic_id", "self", ".", "logger", ".", "debug", "(", "\"DELETE /api/v1/groups/{group_id}/discussion_topics/{topic_id} with query params: {params} and form data: {data}\"", ".", "format", "(", "params", "=", "params", ",", "data", "=", "data", ",", "*", "*", "path", ")", ")", "return", "self", ".", "generic_request", "(", "\"DELETE\"", ",", "\"/api/v1/groups/{group_id}/discussion_topics/{topic_id}\"", ".", "format", "(", "*", "*", "path", ")", ",", "data", "=", "data", ",", "params", "=", "params", ",", "no_data", "=", "True", ")" ]
Must include a manifestationOfWork key .
def is_manifestation_model ( instance , attribute , value ) : instance_name = instance . __class__ . __name__ is_creation_model ( instance , attribute , value ) manifestation_of = value . get ( 'manifestationOfWork' ) if not isinstance ( manifestation_of , str ) : err_str = ( "'manifestationOfWork' must be given as a string in the " "'{attr}' parameter of a '{cls}'. Given " "'{value}'" ) . format ( attr = attribute . name , cls = instance_name , value = manifestation_of ) print ( err_str )
10,227
https://github.com/COALAIP/pycoalaip/blob/cecc8f6ff4733f0525fafcee63647753e832f0be/coalaip/model_validators.py#L68-L81
[ "def", "_stop_remote_console", "(", "self", ")", ":", "if", "self", ".", "_telnet_server", ":", "self", ".", "_telnet_server", ".", "close", "(", ")", "yield", "from", "self", ".", "_telnet_server", ".", "wait_closed", "(", ")", "self", ".", "_remote_pipe", ".", "close", "(", ")", "self", ".", "_telnet_server", "=", "None" ]
Define a preprocessor to run after the arguments are parsed and before the function is executed when running in console script mode .
def add_preprocessor ( preproc ) : def decorator ( func ) : func = ScriptAdaptor . _wrap ( func ) func . _add_preprocessor ( preproc ) return func return decorator
10,228
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L255-L269
[ "def", "EXCHANGE", "(", "classical_reg1", ",", "classical_reg2", ")", ":", "left", "=", "unpack_classical_reg", "(", "classical_reg1", ")", "right", "=", "unpack_classical_reg", "(", "classical_reg2", ")", "return", "ClassicalExchange", "(", "left", ",", "right", ")" ]
Define a postprocessor to run after the function is executed when running in console script mode .
def add_postprocessor ( postproc ) : def decorator ( func ) : func = ScriptAdaptor . _wrap ( func ) func . _add_postprocessor ( postproc ) return func return decorator
10,229
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L272-L289
[ "def", "associate_devices", "(", "self", ",", "thing_names", ",", "config_file", ",", "region", "=", "None", ",", "profile_name", "=", "None", ")", ":", "# TODO remove this function when Group discovery is enriched", "logging", ".", "info", "(", "\"associate_devices thing_names:{0}\"", ".", "format", "(", "thing_names", ")", ")", "config", "=", "GroupConfigFile", "(", "config_file", "=", "config_file", ")", "if", "region", "is", "None", ":", "region", "=", "self", ".", "_region", "devices", "=", "config", "[", "'devices'", "]", "if", "type", "(", "thing_names", ")", "is", "str", ":", "thing_names", "=", "[", "thing_names", "]", "iot_client", "=", "_get_iot_session", "(", "region", "=", "region", ",", "profile_name", "=", "profile_name", ")", "for", "thing_name", "in", "thing_names", ":", "thing", "=", "iot_client", ".", "describe_thing", "(", "thingName", "=", "thing_name", ")", "logging", ".", "info", "(", "\"Found existing Thing:{0}\"", ".", "format", "(", "thing", ")", ")", "p", "=", "iot_client", ".", "list_thing_principals", "(", "thingName", "=", "thing_name", ")", "logging", ".", "info", "(", "\"Existing Thing has principals:{0}\"", ".", "format", "(", "p", ")", ")", "devices", "[", "thing_name", "]", "=", "{", "'thing_arn'", ":", "thing", "[", "'attributes'", "]", "[", "'thingArn'", "]", ",", "'cert_arn'", ":", "p", "[", "'principals'", "]", "[", "0", "]", ",", "'cert_id'", ":", "thing", "[", "'attributes'", "]", "[", "'certificateId'", "]", ",", "'thing_name'", ":", "thing_name", "}", "logging", ".", "info", "(", "\"Thing:'{0}' associated with config:'{1}'\"", ".", "format", "(", "thing_name", ",", "config_file", ")", ")", "config", "[", "'devices'", "]", "=", "devices" ]
Set up logging for the script based on the configuration specified by the logging attribute of the command line arguments .
def _setup_logging ( args ) : log_conf = getattr ( args , 'logging' , None ) if log_conf : logging . config . fileConfig ( log_conf ) else : logging . basicConfig ( )
10,230
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L292-L308
[ "def", "get_unused_media", "(", "exclude", "=", "None", ")", ":", "if", "not", "exclude", ":", "exclude", "=", "[", "]", "all_media", "=", "get_all_media", "(", "exclude", ")", "used_media", "=", "get_used_media", "(", ")", "return", "all_media", "-", "used_media" ]
Set up or update limits in the Redis database .
def setup_limits ( conf_file , limits_file , do_reload = True , dry_run = False , debug = False ) : # If dry_run is set, default debug to True if dry_run : debug = True # Connect to the database... conf = config . Config ( conf_file = conf_file ) db = conf . get_database ( ) limits_key = conf [ 'control' ] . get ( 'limits_key' , 'limits' ) control_channel = conf [ 'control' ] . get ( 'channel' , 'control' ) # Parse the limits file limits_tree = etree . parse ( limits_file ) # Now, we parse the limits XML file lims = [ ] for idx , lim in enumerate ( limits_tree . getroot ( ) ) : # Skip tags we don't recognize if lim . tag != 'limit' : warnings . warn ( "Unrecognized tag %r in limits file at index %d" % ( lim . tag , idx ) ) continue # Construct the limit and add it to the list of limits try : lims . append ( parse_limit_node ( db , idx , lim ) ) except Exception as exc : warnings . warn ( "Couldn't understand limit at index %d: %s" % ( idx , exc ) ) continue # Now that we have the limits, let's install them if debug : print >> sys . stderr , "Installing the following limits:" for lim in lims : print >> sys . stderr , " %r" % lim if not dry_run : database . limit_update ( db , limits_key , lims ) # Were we requested to reload the limits? if do_reload is False : return # OK, figure out what kind of reload to do params = [ ] if do_reload is True : # Nothing to do; use default semantics pass elif ( isinstance ( do_reload , ( int , long , float ) ) or ( isinstance ( do_reload , basestring ) and do_reload . isdigit ( ) ) ) : params = [ 'spread' , do_reload ] else : params = [ str ( do_reload ) ] # Issue the reload command if debug : cmd = [ 'reload' ] cmd . extend ( params ) print >> sys . stderr , ( "Issuing command: %s" % ' ' . join ( str ( c ) for c in cmd ) ) if not dry_run : database . command ( db , control_channel , 'reload' , * params )
10,231
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L473-L555
[ "def", "ekssum", "(", "handle", ",", "segno", ")", ":", "handle", "=", "ctypes", ".", "c_int", "(", "handle", ")", "segno", "=", "ctypes", ".", "c_int", "(", "segno", ")", "segsum", "=", "stypes", ".", "SpiceEKSegSum", "(", ")", "libspice", ".", "ekssum_c", "(", "handle", ",", "segno", ",", "ctypes", ".", "byref", "(", "segsum", ")", ")", "return", "segsum" ]
Given a Limit object generate an XML node .
def make_limit_node ( root , limit ) : # Build the base limit node limit_node = etree . SubElement ( root , 'limit' , { 'class' : limit . _limit_full_name } ) # Walk through all the recognized attributes for attr in sorted ( limit . attrs ) : desc = limit . attrs [ attr ] attr_type = desc . get ( 'type' , str ) value = getattr ( limit , attr ) # Determine the default value, if we have one... if 'default' in desc : default = ( desc [ 'default' ] ( ) if callable ( desc [ 'default' ] ) else desc [ 'default' ] ) # Skip attributes that have their default settings if value == default : continue # Set up the attr node attr_node = etree . SubElement ( limit_node , 'attr' , name = attr ) # Treat lists and dicts specially if attr_type == list : for val in value : val_node = etree . SubElement ( attr_node , 'value' ) val_node . text = str ( val ) elif attr_type == dict : for key , val in sorted ( value . items ( ) , key = lambda x : x [ 0 ] ) : val_node = etree . SubElement ( attr_node , 'value' , key = key ) val_node . text = str ( val ) else : attr_node . text = str ( value )
10,232
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L562-L602
[ "def", "_ProcessRegistryKeySource", "(", "self", ",", "source", ")", ":", "keys", "=", "source", ".", "base_source", ".", "attributes", ".", "get", "(", "\"keys\"", ",", "[", "]", ")", "if", "not", "keys", ":", "return", "interpolated_paths", "=", "artifact_utils", ".", "InterpolateListKbAttributes", "(", "input_list", "=", "keys", ",", "knowledge_base", "=", "self", ".", "knowledge_base", ",", "ignore_errors", "=", "self", ".", "ignore_interpolation_errors", ")", "glob_expressions", "=", "map", "(", "rdf_paths", ".", "GlobExpression", ",", "interpolated_paths", ")", "patterns", "=", "[", "]", "for", "pattern", "in", "glob_expressions", ":", "patterns", ".", "extend", "(", "pattern", ".", "Interpolate", "(", "knowledge_base", "=", "self", ".", "knowledge_base", ")", ")", "patterns", ".", "sort", "(", "key", "=", "len", ",", "reverse", "=", "True", ")", "file_finder_action", "=", "rdf_file_finder", ".", "FileFinderAction", ".", "Stat", "(", ")", "request", "=", "rdf_file_finder", ".", "FileFinderArgs", "(", "paths", "=", "patterns", ",", "action", "=", "file_finder_action", ",", "follow_links", "=", "True", ",", "pathtype", "=", "rdf_paths", ".", "PathSpec", ".", "PathType", ".", "REGISTRY", ")", "action", "=", "vfs_file_finder", ".", "RegistryKeyFromClient", "yield", "action", ",", "request" ]
Dump the current limits from the Redis database .
def dump_limits ( conf_file , limits_file , debug = False ) : # Connect to the database... conf = config . Config ( conf_file = conf_file ) db = conf . get_database ( ) limits_key = conf [ 'control' ] . get ( 'limits_key' , 'limits' ) # Now, grab all the limits lims = [ limits . Limit . hydrate ( db , msgpack . loads ( lim ) ) for lim in db . zrange ( limits_key , 0 , - 1 ) ] # Build up the limits tree root = etree . Element ( 'limits' ) limit_tree = etree . ElementTree ( root ) for idx , lim in enumerate ( lims ) : if debug : print >> sys . stderr , "Dumping limit index %d: %r" % ( idx , lim ) make_limit_node ( root , lim ) # Write out the limits file if limits_file == '-' : limits_file = sys . stdout if debug : print >> sys . stderr , "Dumping limits to file %r" % limits_file limit_tree . write ( limits_file , xml_declaration = True , encoding = 'UTF-8' , pretty_print = True )
10,233
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L617-L652
[ "def", "segment", "(", "f", ",", "output", ",", "target_duration", ",", "mpegts", ")", ":", "try", ":", "target_duration", "=", "int", "(", "target_duration", ")", "except", "ValueError", ":", "exit", "(", "'Error: Invalid target duration.'", ")", "try", ":", "mpegts", "=", "int", "(", "mpegts", ")", "except", "ValueError", ":", "exit", "(", "'Error: Invalid MPEGTS value.'", ")", "WebVTTSegmenter", "(", ")", ".", "segment", "(", "f", ",", "output", ",", "target_duration", ",", "mpegts", ")" ]
Run the external control daemon .
def remote_daemon ( conf_file ) : eventlet . monkey_patch ( ) conf = config . Config ( conf_file = conf_file ) daemon = remote . RemoteControlDaemon ( None , conf ) daemon . serve ( )
10,234
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L673-L683
[ "def", "_merge_colormaps", "(", "kwargs", ")", ":", "from", "trollimage", ".", "colormap", "import", "Colormap", "full_cmap", "=", "None", "palette", "=", "kwargs", "[", "'palettes'", "]", "if", "isinstance", "(", "palette", ",", "Colormap", ")", ":", "full_cmap", "=", "palette", "else", ":", "for", "itm", "in", "palette", ":", "cmap", "=", "create_colormap", "(", "itm", ")", "cmap", ".", "set_range", "(", "itm", "[", "\"min_value\"", "]", ",", "itm", "[", "\"max_value\"", "]", ")", "if", "full_cmap", "is", "None", ":", "full_cmap", "=", "cmap", "else", ":", "full_cmap", "=", "full_cmap", "+", "cmap", "return", "full_cmap" ]
Issue a command to all running control daemons .
def turnstile_command ( conf_file , command , arguments = [ ] , channel = None , debug = False ) : # Connect to the database... conf = config . Config ( conf_file = conf_file ) db = conf . get_database ( ) control_channel = conf [ 'control' ] . get ( 'channel' , 'control' ) # Now, set up the command command = command . lower ( ) ts_conv = False if command == 'ping' : # We handle 'ping' specially; first, figure out the channel if arguments : channel = arguments [ 0 ] else : channel = str ( uuid . uuid4 ( ) ) arguments = [ channel ] # Next, add on a timestamp if len ( arguments ) < 2 : arguments . append ( time . time ( ) ) ts_conv = True # Limit the argument list length arguments = arguments [ : 2 ] # OK, the command is all set up. Let us now send the command... if debug : cmd = [ command ] + arguments print >> sys . stderr , ( "Issuing command: %s" % ' ' . join ( cmd ) ) database . command ( db , control_channel , command , * arguments ) # Were we asked to listen on a channel? if not channel : return # OK, let's subscribe to the channel... pubsub = db . pubsub ( ) pubsub . subscribe ( channel ) # Now we listen... try : count = 0 for msg in pubsub . listen ( ) : # Make sure the message is one we're interested in if debug : formatted = pprint . pformat ( msg ) print >> sys . stderr , "Received message: %s" % formatted if ( msg [ 'type' ] not in ( 'pmessage' , 'message' ) or msg [ 'channel' ] != channel ) : continue count += 1 # Figure out the response response = msg [ 'data' ] . split ( ':' ) # If this is a 'pong' and ts_conv is true, add an RTT to # the response if ts_conv and response [ 0 ] == 'pong' : try : rtt = ( time . time ( ) - float ( response [ 2 ] ) ) * 100 response . append ( '(RTT %.2fms)' % rtt ) except Exception : # IndexError or ValueError, probably; ignore it pass # Print out the response print "Response % 5d: %s" % ( count , ' ' . 
join ( response ) ) except KeyboardInterrupt : # We want to break out of the loop, but not return any error # to the caller... pass
10,235
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L712-L803
[ "def", "create_index", "(", "self", ",", "indexname", "=", "None", ",", "index_conf", "=", "None", ")", ":", "if", "indexname", "is", "None", ":", "indexname", "=", "self", ".", "index_name", "log", ".", "debug", "(", "\"Creating new index: '{0}'\"", ".", "format", "(", "indexname", ")", ")", "if", "index_conf", "is", "None", ":", "index_conf", "=", "{", "'settings'", ":", "self", ".", "settings", ",", "'mappings'", ":", "{", "'book'", ":", "{", "'properties'", ":", "self", ".", "properties", "}", "}", "}", "try", ":", "self", ".", "es", ".", "indices", ".", "create", "(", "index", "=", "indexname", ",", "body", "=", "index_conf", ")", "except", "TransportError", "as", "te", ":", "if", "te", ".", "error", ".", "startswith", "(", "\"IndexAlreadyExistsException\"", ")", ":", "raise", "Exception", "(", "\"Cannot create index '{}', already exists\"", ".", "format", "(", "indexname", ")", ")", "else", ":", "raise" ]
Run the compactor daemon .
def compactor_daemon ( conf_file ) : eventlet . monkey_patch ( ) conf = config . Config ( conf_file = conf_file ) compactor . compactor ( conf )
10,236
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L820-L829
[ "def", "create_month_selectbox", "(", "name", ",", "selected_month", "=", "0", ",", "ln", "=", "None", ")", ":", "ln", "=", "default_ln", "(", "ln", ")", "out", "=", "\"<select name=\\\"%s\\\">\\n\"", "%", "name", "for", "i", "in", "range", "(", "0", ",", "13", ")", ":", "out", "+=", "\"<option value=\\\"%i\\\"\"", "%", "i", "if", "(", "i", "==", "selected_month", ")", ":", "out", "+=", "\" selected=\\\"selected\\\"\"", "out", "+=", "\">%s</option>\\n\"", "%", "get_i18n_month_name", "(", "i", ",", "ln", ")", "out", "+=", "\"</select>\\n\"", "return", "out" ]
Ensures that the function is wrapped in a ScriptAdaptor object . If it is not a new ScriptAdaptor will be returned . If it is the ScriptAdaptor is returned .
def _wrap ( cls , func ) : if isinstance ( func , cls ) : return func return functools . update_wrapper ( cls ( func ) , func )
10,237
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L50-L61
[ "def", "delete_repository_config", "(", "namespace", ",", "name", ",", "snapshot_id", ")", ":", "uri", "=", "\"configurations/{0}/{1}/{2}\"", ".", "format", "(", "namespace", ",", "name", ",", "snapshot_id", ")", "return", "__delete", "(", "uri", ")" ]
Set up an argparse . ArgumentParser object by adding all the arguments taken by the function .
def setup_args ( self , parser ) : # Add all the arguments to the argument parser for args , kwargs in self . _arguments : parser . add_argument ( * args , * * kwargs )
10,238
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L131-L139
[ "def", "mass_properties", "(", "self", ")", ":", "mass", "=", "triangles", ".", "mass_properties", "(", "triangles", "=", "self", ".", "triangles", ",", "crosses", "=", "self", ".", "triangles_cross", ",", "density", "=", "self", ".", "_density", ",", "center_mass", "=", "self", ".", "_center_mass", ",", "skip_inertia", "=", "False", ")", "# if magical clean- up mode is enabled", "# and mesh is watertight/wound correctly but with negative", "# volume it means that every triangle is probably facing", "# inwards, so we invert it in- place without dumping cache", "if", "(", "self", ".", "_validate", "and", "self", ".", "is_watertight", "and", "self", ".", "is_winding_consistent", "and", "np", ".", "linalg", ".", "det", "(", "mass", "[", "'inertia'", "]", ")", "<", "0.0", "and", "mass", "[", "'mass'", "]", "<", "0.0", "and", "mass", "[", "'volume'", "]", "<", "0.0", ")", ":", "# negate mass properties so we don't need to recalculate", "mass", "[", "'inertia'", "]", "=", "-", "mass", "[", "'inertia'", "]", "mass", "[", "'mass'", "]", "=", "-", "mass", "[", "'mass'", "]", "mass", "[", "'volume'", "]", "=", "-", "mass", "[", "'volume'", "]", "# invert the faces and normals of the mesh", "self", ".", "invert", "(", ")", "return", "mass" ]
Given a Namespace object drawn from argparse determines the keyword arguments to pass to the underlying function . Note that if the underlying function accepts all keyword arguments the dictionary returned will contain the entire contents of the Namespace object . Also note that an AttributeError will be raised if any argument required by the function is not set in the Namespace object .
def get_kwargs ( self , args ) : # Now we need to figure out which arguments the final function # actually needs kwargs = { } argspec = inspect . getargspec ( self . _func ) required = set ( argspec . args [ : - len ( argspec . defaults ) ] if argspec . defaults else argspec . args ) for arg_name in argspec . args : try : kwargs [ arg_name ] = getattr ( args , arg_name ) except AttributeError : if arg_name in required : # If this happens, that's a programming failure raise # If the function accepts any keyword argument, add whatever # remains if argspec . keywords : for key , value in args . __dict__ . items ( ) : if key in kwargs : # Already handled continue kwargs [ key ] = value return kwargs
10,239
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L141-L177
[ "def", "is_labial", "(", "c", ",", "lang", ")", ":", "o", "=", "get_offset", "(", "c", ",", "lang", ")", "return", "(", "o", ">=", "LABIAL_RANGE", "[", "0", "]", "and", "o", "<=", "LABIAL_RANGE", "[", "1", "]", ")" ]
Call the function as a console script . Command line arguments are parsed preprocessors are called then the function is called . If a debug attribute is set by the command line arguments and it is True any exception raised by the underlying function will be reraised ; otherwise the return value will be either the return value of the function or the text contents of the exception .
def console ( self ) : # First, let's parse the arguments parser = argparse . ArgumentParser ( description = self . description ) self . setup_args ( parser ) args = parser . parse_args ( ) # Next, let's run the preprocessors in order for proc in self . _preprocess : try : proc ( args ) except Exception as exc : if getattr ( args , 'debug' , False ) : raise return str ( exc ) # Finally, safely call the underlying function result = self . safe_call ( self . get_kwargs ( args ) , args ) # Now, run the postprocessors in order for proc in self . _postprocess : result = proc ( args , result ) return result
10,240
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/tools.py#L206-L238
[ "def", "DeleteSubjects", "(", "self", ",", "subjects", ",", "sync", "=", "False", ")", ":", "for", "subject", "in", "subjects", ":", "self", ".", "DeleteSubject", "(", "subject", ",", "sync", "=", "sync", ")" ]
Import a class based on its full name .
def import_class ( name : Text ) -> Type : parts = name . split ( '.' ) module_name = parts [ : - 1 ] class_name = parts [ - 1 ] module_ = importlib . import_module ( '.' . join ( module_name ) ) return getattr ( module_ , class_name )
10,241
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/utils.py#L34-L45
[ "def", "render", "(", "self", ",", "data", ",", "accepted_media_type", "=", "None", ",", "renderer_context", "=", "None", ")", ":", "if", "'SWAGGER_JSON_PATH'", "in", "os", ".", "environ", ":", "with", "io", ".", "open", "(", "os", ".", "environ", "[", "'SWAGGER_JSON_PATH'", "]", ",", "'rb'", ")", "as", "f", ":", "return", "f", ".", "read", "(", ")", "else", ":", "return", "super", "(", "ConditionalOpenAPIRenderer", ",", "self", ")", ".", "render", "(", "data", ",", "accepted_media_type", ",", "renderer_context", ")" ]
Make a json - serializable type recursively read - only
def make_ro ( obj : Any , forgive_type = False ) : if isinstance ( obj , ( str , bytes , int , float , bool , RoDict , RoList ) ) or obj is None : return obj elif isinstance ( obj , Mapping ) : return RoDict ( obj , forgive_type ) elif isinstance ( obj , Sequence ) : return RoList ( obj , forgive_type ) elif forgive_type : return obj else : raise ValueError ( 'Trying to make read-only an object of type "{}"' . format ( obj . __class__ . __name__ ) )
10,242
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/utils.py#L127-L147
[ "def", "eof_received", "(", "self", ")", "->", "bool", ":", "logger", ".", "debug", "(", "\"%s - event = eof_received()\"", ",", "self", ".", "side", ")", "super", "(", ")", ".", "eof_received", "(", ")", "return", "False" ]
Copy a RO object into a RW structure made with standard Python classes .
def make_rw ( obj : Any ) : if isinstance ( obj , RoDict ) : return { k : make_rw ( v ) for k , v in obj . items ( ) } elif isinstance ( obj , RoList ) : return [ make_rw ( x ) for x in obj ] else : return obj
10,243
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/utils.py#L150-L162
[ "def", "_get_feed_cache", "(", "self", ")", ":", "feed_cache", "=", "None", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "_feed_cache_file", ")", ":", "maxage", "=", "datetime", ".", "now", "(", ")", "-", "timedelta", "(", "minutes", "=", "self", ".", "_cachetime", ")", "file_ts", "=", "datetime", ".", "fromtimestamp", "(", "os", ".", "stat", "(", "self", ".", "_feed_cache_file", ")", ".", "st_mtime", ")", "if", "file_ts", ">", "maxage", ":", "try", ":", "with", "open", "(", "self", ".", "_feed_cache_file", ",", "'rb'", ")", "as", "cache", ":", "feed_cache", "=", "cache", ".", "read", "(", ")", "finally", ":", "pass", "return", "feed_cache" ]
Given an URL change the query string to include the values specified in the dictionary .
def patch_qs ( url : Text , data : Dict [ Text , Text ] ) -> Text : qs_id = 4 p = list ( urlparse ( url ) ) qs = parse_qsl ( p [ qs_id ] ) # type: List[Tuple[Text, Text]] patched_qs = list ( chain ( filter ( lambda x : x [ 0 ] not in data , qs ) , data . items ( ) , ) ) p [ qs_id ] = urlencode ( patched_qs ) return urlunparse ( p )
10,244
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/utils.py#L203-L225
[ "def", "_error_messages", "(", "self", ",", "driver_id", ")", ":", "assert", "isinstance", "(", "driver_id", ",", "ray", ".", "DriverID", ")", "message", "=", "self", ".", "redis_client", ".", "execute_command", "(", "\"RAY.TABLE_LOOKUP\"", ",", "ray", ".", "gcs_utils", ".", "TablePrefix", ".", "ERROR_INFO", ",", "\"\"", ",", "driver_id", ".", "binary", "(", ")", ")", "# If there are no errors, return early.", "if", "message", "is", "None", ":", "return", "[", "]", "gcs_entries", "=", "ray", ".", "gcs_utils", ".", "GcsTableEntry", ".", "GetRootAsGcsTableEntry", "(", "message", ",", "0", ")", "error_messages", "=", "[", "]", "for", "i", "in", "range", "(", "gcs_entries", ".", "EntriesLength", "(", ")", ")", ":", "error_data", "=", "ray", ".", "gcs_utils", ".", "ErrorTableData", ".", "GetRootAsErrorTableData", "(", "gcs_entries", ".", "Entries", "(", "i", ")", ",", "0", ")", "assert", "driver_id", ".", "binary", "(", ")", "==", "error_data", ".", "DriverId", "(", ")", "error_message", "=", "{", "\"type\"", ":", "decode", "(", "error_data", ".", "Type", "(", ")", ")", ",", "\"message\"", ":", "decode", "(", "error_data", ".", "ErrorMessage", "(", ")", ")", ",", "\"timestamp\"", ":", "error_data", ".", "Timestamp", "(", ")", ",", "}", "error_messages", ".", "append", "(", "error_message", ")", "return", "error_messages" ]
Checks that all keys present in subset are present and have the same value in full_set . If a key is in full_set but not in subset then True will be returned anyways .
def dict_is_subset ( subset : Any , full_set : Any ) -> bool : if not isinstance ( subset , full_set . __class__ ) : return False elif isinstance ( subset , dict ) : for k , v in subset . items ( ) : if k not in full_set or not dict_is_subset ( v , full_set [ k ] ) : return False return True elif isinstance ( subset , list ) : if len ( subset ) != len ( full_set ) : return False for a , b in zip ( subset , full_set ) : if not dict_is_subset ( a , b ) : return False return True else : return subset == full_set
10,245
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/utils.py#L234-L259
[ "def", "serve_forever", "(", "self", ",", "poll_interval", "=", "0.5", ")", ":", "self", ".", "serial_port", ".", "timeout", "=", "poll_interval", "while", "not", "self", ".", "_shutdown_request", ":", "try", ":", "self", ".", "serve_once", "(", ")", "except", "(", "CRCError", ",", "struct", ".", "error", ")", "as", "e", ":", "log", ".", "error", "(", "'Can\\'t handle request: {0}'", ".", "format", "(", "e", ")", ")", "except", "(", "SerialTimeoutException", ",", "ValueError", ")", ":", "pass" ]
Transform a class exp into an actual regex
def _compile ( self , expression ) : x = self . RE_PYTHON_VAR . sub ( '(?:\\1,)' , expression ) x = self . RE_SPACES . sub ( '' , x ) return re . compile ( x )
10,246
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/utils.py#L177-L184
[ "def", "iloc", "(", "cls", ",", "dataset", ",", "index", ")", ":", "rows", ",", "cols", "=", "index", "scalar", "=", "False", "if", "isinstance", "(", "cols", ",", "slice", ")", ":", "cols", "=", "[", "d", ".", "name", "for", "d", "in", "dataset", ".", "dimensions", "(", ")", "]", "[", "cols", "]", "elif", "np", ".", "isscalar", "(", "cols", ")", ":", "scalar", "=", "np", ".", "isscalar", "(", "rows", ")", "cols", "=", "[", "dataset", ".", "get_dimension", "(", "cols", ")", ".", "name", "]", "else", ":", "cols", "=", "[", "dataset", ".", "get_dimension", "(", "d", ")", ".", "name", "for", "d", "in", "index", "[", "1", "]", "]", "if", "np", ".", "isscalar", "(", "rows", ")", ":", "rows", "=", "[", "rows", "]", "data", "=", "OrderedDict", "(", ")", "for", "c", "in", "cols", ":", "data", "[", "c", "]", "=", "dataset", ".", "data", "[", "c", "]", ".", "compute", "(", ")", ".", "iloc", "[", "rows", "]", ".", "values", "if", "scalar", ":", "return", "data", "[", "cols", "[", "0", "]", "]", "[", "0", "]", "return", "tuple", "(", "data", ".", "values", "(", ")", ")" ]
Transforms a list of objects into a matchable string
def _make_string ( self , objects : List [ Any ] ) -> Text : return '' . join ( x . __class__ . __name__ + ',' for x in objects )
10,247
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/utils.py#L186-L191
[ "def", "on_recv", "(", "self", ",", "cf", ")", ":", "data", "=", "bytes", "(", "cf", ".", "data", ")", "if", "len", "(", "data", ")", "<", "2", ":", "return", "ae", "=", "0", "if", "self", ".", "extended_rx_addr", "is", "not", "None", ":", "ae", "=", "1", "if", "len", "(", "data", ")", "<", "3", ":", "return", "if", "six", ".", "indexbytes", "(", "data", ",", "0", ")", "!=", "self", ".", "extended_rx_addr", ":", "return", "n_pci", "=", "six", ".", "indexbytes", "(", "data", ",", "ae", ")", "&", "0xf0", "if", "n_pci", "==", "N_PCI_FC", ":", "with", "self", ".", "tx_mutex", ":", "self", ".", "_recv_fc", "(", "data", "[", "ae", ":", "]", ")", "elif", "n_pci", "==", "N_PCI_SF", ":", "with", "self", ".", "rx_mutex", ":", "self", ".", "_recv_sf", "(", "data", "[", "ae", ":", "]", ")", "elif", "n_pci", "==", "N_PCI_FF", ":", "with", "self", ".", "rx_mutex", ":", "self", ".", "_recv_ff", "(", "data", "[", "ae", ":", "]", ")", "elif", "n_pci", "==", "N_PCI_CF", ":", "with", "self", ".", "rx_mutex", ":", "self", ".", "_recv_cf", "(", "data", "[", "ae", ":", "]", ")" ]
Return True if the list of objects matches the expression .
def match ( self , objects : List [ Any ] ) -> bool : s = self . _make_string ( objects ) m = self . _compiled_expression . match ( s ) return m is not None
10,248
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/utils.py#L193-L200
[ "def", "writearff", "(", "data", ",", "filename", ",", "relation_name", "=", "None", ",", "index", "=", "True", ")", ":", "if", "isinstance", "(", "filename", ",", "str", ")", ":", "fp", "=", "open", "(", "filename", ",", "'w'", ")", "if", "relation_name", "is", "None", ":", "relation_name", "=", "os", ".", "path", ".", "basename", "(", "filename", ")", "else", ":", "fp", "=", "filename", "if", "relation_name", "is", "None", ":", "relation_name", "=", "\"pandas\"", "try", ":", "data", "=", "_write_header", "(", "data", ",", "fp", ",", "relation_name", ",", "index", ")", "fp", ".", "write", "(", "\"\\n\"", ")", "_write_data", "(", "data", ",", "fp", ")", "finally", ":", "fp", ".", "close", "(", ")" ]
Gets a config opt from conf file under section sect .
def get_conf ( conf , sect , opt ) : argu = getattr ( args , "mambupy_" + opt . lower ( ) ) if not argu : envir = os . environ . get ( "MAMBUPY_" + opt . upper ( ) ) if not envir : try : return conf . get ( sect , opt ) except NoSectionError : return default_configs [ opt ] return envir return argu
10,249
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/mambuconfig.py#L119-L152
[ "def", "start_archive", "(", "self", ",", "session_id", ",", "has_audio", "=", "True", ",", "has_video", "=", "True", ",", "name", "=", "None", ",", "output_mode", "=", "OutputModes", ".", "composed", ",", "resolution", "=", "None", ")", ":", "if", "not", "isinstance", "(", "output_mode", ",", "OutputModes", ")", ":", "raise", "OpenTokException", "(", "u", "(", "'Cannot start archive, {0} is not a valid output mode'", ")", ".", "format", "(", "output_mode", ")", ")", "if", "resolution", "and", "output_mode", "==", "OutputModes", ".", "individual", ":", "raise", "OpenTokException", "(", "u", "(", "'Invalid parameters: Resolution cannot be supplied for individual output mode.'", ")", ")", "payload", "=", "{", "'name'", ":", "name", ",", "'sessionId'", ":", "session_id", ",", "'hasAudio'", ":", "has_audio", ",", "'hasVideo'", ":", "has_video", ",", "'outputMode'", ":", "output_mode", ".", "value", ",", "'resolution'", ":", "resolution", ",", "}", "response", "=", "requests", ".", "post", "(", "self", ".", "endpoints", ".", "archive_url", "(", ")", ",", "data", "=", "json", ".", "dumps", "(", "payload", ")", ",", "headers", "=", "self", ".", "json_headers", "(", ")", ",", "proxies", "=", "self", ".", "proxies", ",", "timeout", "=", "self", ".", "timeout", ")", "if", "response", ".", "status_code", "<", "300", ":", "return", "Archive", "(", "self", ",", "response", ".", "json", "(", ")", ")", "elif", "response", ".", "status_code", "==", "403", ":", "raise", "AuthError", "(", ")", "elif", "response", ".", "status_code", "==", "400", ":", "\"\"\"\n The HTTP response has a 400 status code in the following cases:\n You do not pass in a session ID or you pass in an invalid session ID.\n No clients are actively connected to the OpenTok session.\n You specify an invalid resolution value.\n The outputMode property is set to \"individual\" and you set the resolution property and (which is not supported in individual stream archives).\n \"\"\"", "raise", "RequestError", "(", 
"response", ".", "json", "(", ")", ".", "get", "(", "\"message\"", ")", ")", "elif", "response", ".", "status_code", "==", "404", ":", "raise", "NotFoundError", "(", "\"Session not found\"", ")", "elif", "response", ".", "status_code", "==", "409", ":", "raise", "ArchiveError", "(", "response", ".", "json", "(", ")", ".", "get", "(", "\"message\"", ")", ")", "else", ":", "raise", "RequestError", "(", "\"An unexpected error occurred\"", ",", "response", ".", "status_code", ")" ]
Get ISO8601 - formatted timestamp string .
def iso8601timestamp ( T = None , nanos = True , utc = False ) : T = time . time ( ) if T is None else T Ti = math . floor ( T ) Tn = round ( ( T - Ti ) * 1e9 ) if Tn >= 1e9 : Ti += 1 Tn = 0 s = time . gmtime ( Ti ) if utc else time . localtime ( Ti ) f = time . strftime ( "%Y%m%dT%H%M%S" , s ) n = ".{:09d}" . format ( Tn ) if nanos else "" tz = "Z" if utc else time . strftime ( "%z" , s ) return f + n + tz
10,250
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/fhs.py#L5-L18
[ "def", "OnSelectReader", "(", "self", ",", "event", ")", ":", "item", "=", "event", ".", "GetItem", "(", ")", "if", "item", ":", "itemdata", "=", "self", ".", "readertreepanel", ".", "readertreectrl", ".", "GetItemPyData", "(", "item", ")", "if", "isinstance", "(", "itemdata", ",", "smartcard", ".", "Card", ".", "Card", ")", ":", "self", ".", "dialogpanel", ".", "OnSelectCard", "(", "itemdata", ")", "elif", "isinstance", "(", "itemdata", ",", "smartcard", ".", "reader", ".", "Reader", ".", "Reader", ")", ":", "self", ".", "dialogpanel", ".", "OnSelectReader", "(", "itemdata", ")", "else", ":", "self", ".", "dialogpanel", ".", "OnDeselectCard", "(", "itemdata", ")" ]
Create working directory for experiment if not existing already .
def createWorkDir ( baseDir , projName , expUUID , expNames = [ ] , nanos = True , utc = False ) : # # First, ensure the project's top-level hierarchy, especially by-uuid/, # exists, so that the only possible failure is due to the creation of # one additional directory. # projDir = os . path . join ( baseDir , projName ) byuuidDir = os . path . join ( projDir , "by-uuid" ) bytimeDir = os . path . join ( projDir , "by-time" ) bynameDir = os . path . join ( projDir , "by-name" , * expNames ) byuuidPath = os . path . join ( byuuidDir , expUUID ) os . makedirs ( byuuidDir , mode = 0o755 , exist_ok = True ) os . makedirs ( bytimeDir , mode = 0o755 , exist_ok = True ) os . makedirs ( bynameDir , mode = 0o755 , exist_ok = True ) # # Attempt the creation of the experiment workDir by its UUID. Record # whether we were the original creators. # try : preexisting = False os . makedirs ( byuuidPath , mode = 0o755 , exist_ok = False ) except FileExistsError : preexisting = True # # If we were the first to create this working directory, additionally # make symlinks pointing to it from the auxiliary directories. # if not preexisting : expTime = iso8601timestamp ( nanos = nanos , utc = utc ) expTimeUUID = expTime + "-" + expUUID bytimePath = os . path . join ( bytimeDir , expTimeUUID ) bynamePath = os . path . join ( bynameDir , expUUID ) os . symlink ( os . path . relpath ( byuuidPath , bytimeDir ) , bytimePath , True ) os . symlink ( os . path . relpath ( byuuidPath , bynameDir ) , bynamePath , True ) # # Create handy .rsync-filter files. # with contextlib . suppress ( OSError ) : with open ( os . path . join ( baseDir , ".rsync-filter" ) , "x" ) as f : f . write ( "#\n" "# rsync filter rules.\n" "#\n" "# When the argument -F is given to rsync, the rules within will be obeyed.\n" "#\n" ) with contextlib . suppress ( OSError ) : with open ( os . path . join ( projDir , ".rsync-filter" ) , "x" ) as f : f . 
write ( "#\n" "# rsync filter rules.\n" "#\n" "# When the argument -F is given to rsync, the rules within will be obeyed.\n" "#\n" ) # # Return the constructed workDir. # return byuuidPath
10,251
https://github.com/obilaniu/Nauka/blob/1492a4f9d204a868c1a8a1d327bd108490b856b4/src/nauka/fhs.py#L21-L89
[ "def", "scale_rows", "(", "A", ",", "v", ",", "copy", "=", "True", ")", ":", "v", "=", "np", ".", "ravel", "(", "v", ")", "M", ",", "N", "=", "A", ".", "shape", "if", "not", "isspmatrix", "(", "A", ")", ":", "raise", "ValueError", "(", "'scale rows needs a sparse matrix'", ")", "if", "M", "!=", "len", "(", "v", ")", ":", "raise", "ValueError", "(", "'scale vector has incompatible shape'", ")", "if", "copy", ":", "A", "=", "A", ".", "copy", "(", ")", "A", ".", "data", "=", "np", ".", "asarray", "(", "A", ".", "data", ",", "dtype", "=", "upcast", "(", "A", ".", "dtype", ",", "v", ".", "dtype", ")", ")", "else", ":", "v", "=", "np", ".", "asarray", "(", "v", ",", "dtype", "=", "A", ".", "dtype", ")", "if", "isspmatrix_csr", "(", "A", ")", ":", "csr_scale_rows", "(", "M", ",", "N", ",", "A", ".", "indptr", ",", "A", ".", "indices", ",", "A", ".", "data", ",", "v", ")", "elif", "isspmatrix_bsr", "(", "A", ")", ":", "R", ",", "C", "=", "A", ".", "blocksize", "bsr_scale_rows", "(", "int", "(", "M", "/", "R", ")", ",", "int", "(", "N", "/", "C", ")", ",", "R", ",", "C", ",", "A", ".", "indptr", ",", "A", ".", "indices", ",", "np", ".", "ravel", "(", "A", ".", "data", ")", ",", "v", ")", "elif", "isspmatrix_csc", "(", "A", ")", ":", "pyamg", ".", "amg_core", ".", "csc_scale_rows", "(", "M", ",", "N", ",", "A", ".", "indptr", ",", "A", ".", "indices", ",", "A", ".", "data", ",", "v", ")", "else", ":", "fmt", "=", "A", ".", "format", "A", "=", "scale_rows", "(", "csr_matrix", "(", "A", ")", ",", "v", ")", ".", "asformat", "(", "fmt", ")", "return", "A" ]
Creates a string representation of time since the given start_time .
def humanize_timesince ( start_time ) : # pylint:disable=too-many-return-statements if not start_time : return start_time delta = local_now ( ) - start_time # assumption: negative delta values originate from clock # differences on different app server machines if delta . total_seconds ( ) < 0 : return 'a few seconds ago' num_years = delta . days // 365 if num_years > 0 : return '{} year{} ago' . format ( * ( ( num_years , 's' ) if num_years > 1 else ( num_years , '' ) ) ) num_weeks = delta . days // 7 if num_weeks > 0 : return '{} week{} ago' . format ( * ( ( num_weeks , 's' ) if num_weeks > 1 else ( num_weeks , '' ) ) ) num_days = delta . days if num_days > 0 : return '{} day{} ago' . format ( * ( ( num_days , 's' ) if num_days > 1 else ( num_days , '' ) ) ) num_hours = delta . seconds // 3600 if num_hours > 0 : return '{} hour{} ago' . format ( * ( ( num_hours , 's' ) if num_hours > 1 else ( num_hours , '' ) ) ) num_minutes = delta . seconds // 60 if num_minutes > 0 : return '{} minute{} ago' . format ( * ( ( num_minutes , 's' ) if num_minutes > 1 else ( num_minutes , '' ) ) ) return 'a few seconds ago'
10,252
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/humanize.py#L7-L43
[ "def", "_get_optional_attrs", "(", "kws", ")", ":", "vals", "=", "OboOptionalAttrs", ".", "attributes", ".", "intersection", "(", "kws", ".", "keys", "(", ")", ")", "if", "'sections'", "in", "kws", ":", "vals", ".", "add", "(", "'relationship'", ")", "if", "'norel'", "in", "kws", ":", "vals", ".", "discard", "(", "'relationship'", ")", "return", "vals" ]
Creates a string representation of timedelta .
def humanize_timedelta ( seconds ) : hours , remainder = divmod ( seconds , 3600 ) days , hours = divmod ( hours , 24 ) minutes , seconds = divmod ( remainder , 60 ) if days : result = '{}d' . format ( days ) if hours : result += ' {}h' . format ( hours ) if minutes : result += ' {}m' . format ( minutes ) return result if hours : result = '{}h' . format ( hours ) if minutes : result += ' {}m' . format ( minutes ) return result if minutes : result = '{}m' . format ( minutes ) if seconds : result += ' {}s' . format ( seconds ) return result return '{}s' . format ( seconds )
10,253
https://github.com/polyaxon/hestia/blob/382ed139cff8bf35c987cfc30a31b72c0d6b808e/hestia/humanize.py#L46-L72
[ "def", "get_images_by_tail_number", "(", "self", ",", "tail_number", ",", "page", "=", "1", ",", "limit", "=", "100", ")", ":", "url", "=", "REG_BASE", ".", "format", "(", "tail_number", ",", "str", "(", "self", ".", "AUTH_TOKEN", ")", ",", "page", ",", "limit", ")", "return", "self", ".", "_fr24", ".", "get_aircraft_image_data", "(", "url", ")" ]
Overrides default start behaviour by raising ConnectionError instead of custom requests_mock . exceptions . NoMockAddress .
def start ( self ) : if self . _http_last_send is not None : raise RuntimeError ( 'HttpMock has already been started' ) # 1) save request.Session.send in self._last_send # 2) replace request.Session.send with MockerCore send function super ( HttpMock , self ) . start ( ) # 3) save MockerCore send function in self._http_last_send # 4) replace request.Session.send with HttpMock send function self . _patch_last_send ( )
10,254
https://github.com/peopledoc/mock-services/blob/fd3838280df8869725b538768357435eedf299c1/mock_services/http_mock.py#L63-L76
[ "def", "read_struct", "(", "fstream", ")", ":", "line", "=", "fstream", ".", "readline", "(", ")", ".", "strip", "(", ")", "fragments", "=", "line", ".", "split", "(", "\",\"", ")", "fragments", "=", "[", "x", "for", "x", "in", "fragments", "if", "x", "is", "not", "None", "]", "partition", "=", "dict", "(", ")", "if", "not", "len", "(", "fragments", ")", ">=", "3", ":", "return", "None", "partition", "[", "\"struct\"", "]", "=", "fragments", "[", "0", "]", "partition", "[", "\"info\"", "]", "=", "fragments", "[", "1", "]", "partition", "[", "\"num_lines\"", "]", "=", "fragments", "[", "2", "]", "struct", "=", "None", "if", "partition", "is", "not", "None", "and", "partition", "[", "\"struct\"", "]", "==", "\"STRUCT\"", ":", "num_lines", "=", "int", "(", "partition", "[", "\"num_lines\"", "]", ".", "strip", "(", ")", ")", "struct", "=", "{", "}", "for", "_", "in", "range", "(", "num_lines", ")", ":", "cols", "=", "fetch_cols", "(", "fstream", ")", "struct", ".", "update", "(", "{", "cols", "[", "0", "]", ":", "cols", "[", "1", ":", "]", "}", ")", "return", "struct" ]
Unregisters a handler
def unhandle ( self , handler ) : h , _ , _ = self . _extract ( handler ) key = hash ( h ) with self . _hlock : if key not in self . handlers : raise ValueError ( 'Handler "%s" was not found' % str ( h ) ) handlers = self . handlers . copy ( ) del handlers [ key ] self . handlers = handlers return self
10,255
https://github.com/axel-events/axel/blob/08a663347ef21614b96f92f60f4de57a502db73c/axel/axel.py#L163-L173
[ "def", "print_class_details", "(", "self", ",", "fname", ",", "classname", ")", ":", "fobj", "=", "open", "(", "fname", ",", "\"w\"", ")", "fobj", ".", "write", "(", "self", ".", "header", "%", "(", "classname", ",", "self", ".", "style", ")", ")", "fobj", ".", "write", "(", "\"<h1>%s</h1>\\n\"", "%", "(", "classname", ")", ")", "sizes", "=", "[", "tobj", ".", "get_max_size", "(", ")", "for", "tobj", "in", "self", ".", "index", "[", "classname", "]", "]", "total", "=", "0", "for", "s", "in", "sizes", ":", "total", "+=", "s", "data", "=", "{", "'cnt'", ":", "len", "(", "self", ".", "index", "[", "classname", "]", ")", ",", "'cls'", ":", "classname", "}", "data", "[", "'avg'", "]", "=", "pp", "(", "total", "/", "len", "(", "sizes", ")", ")", "data", "[", "'max'", "]", "=", "pp", "(", "max", "(", "sizes", ")", ")", "data", "[", "'min'", "]", "=", "pp", "(", "min", "(", "sizes", ")", ")", "fobj", ".", "write", "(", "self", ".", "class_summary", "%", "data", ")", "fobj", ".", "write", "(", "self", ".", "charts", "[", "classname", "]", ")", "fobj", ".", "write", "(", "\"<h2>Coalesced Referents per Snapshot</h2>\\n\"", ")", "for", "snapshot", "in", "self", ".", "snapshots", ":", "if", "classname", "in", "snapshot", ".", "classes", ":", "merged", "=", "snapshot", ".", "classes", "[", "classname", "]", "[", "'merged'", "]", "fobj", ".", "write", "(", "self", ".", "class_snapshot", "%", "{", "'name'", ":", "snapshot", ".", "desc", ",", "'cls'", ":", "classname", ",", "'total'", ":", "pp", "(", "merged", ".", "size", ")", "}", ")", "if", "merged", ".", "refs", ":", "self", ".", "_print_refs", "(", "fobj", ",", "merged", ".", "refs", ",", "merged", ".", "size", ")", "else", ":", "fobj", ".", "write", "(", "'<p>No per-referent sizes recorded.</p>\\n'", ")", "fobj", ".", "write", "(", "\"<h2>Instances</h2>\\n\"", ")", "for", "tobj", "in", "self", ".", "index", "[", "classname", "]", ":", "fobj", ".", "write", "(", "'<table id=\"tl\" width=\"100%\" rules=\"rows\">\\n'", ")", 
"fobj", ".", "write", "(", "'<tr><td id=\"hl\" width=\"140px\">Instance</td><td id=\"hl\">%s at 0x%08x</td></tr>\\n'", "%", "(", "tobj", ".", "name", ",", "tobj", ".", "id", ")", ")", "if", "tobj", ".", "repr", ":", "fobj", ".", "write", "(", "\"<tr><td>Representation</td><td>%s&nbsp;</td></tr>\\n\"", "%", "tobj", ".", "repr", ")", "fobj", ".", "write", "(", "\"<tr><td>Lifetime</td><td>%s - %s</td></tr>\\n\"", "%", "(", "pp_timestamp", "(", "tobj", ".", "birth", ")", ",", "pp_timestamp", "(", "tobj", ".", "death", ")", ")", ")", "if", "tobj", ".", "trace", ":", "trace", "=", "\"<pre>%s</pre>\"", "%", "(", "_format_trace", "(", "tobj", ".", "trace", ")", ")", "fobj", ".", "write", "(", "\"<tr><td>Instantiation</td><td>%s</td></tr>\\n\"", "%", "trace", ")", "for", "(", "timestamp", ",", "size", ")", "in", "tobj", ".", "snapshots", ":", "fobj", ".", "write", "(", "\"<tr><td>%s</td>\"", "%", "pp_timestamp", "(", "timestamp", ")", ")", "if", "not", "size", ".", "refs", ":", "fobj", ".", "write", "(", "\"<td>%s</td></tr>\\n\"", "%", "pp", "(", "size", ".", "size", ")", ")", "else", ":", "fobj", ".", "write", "(", "\"<td>%s\"", "%", "pp", "(", "size", ".", "size", ")", ")", "self", ".", "_print_refs", "(", "fobj", ",", "size", ".", "refs", ",", "size", ".", "size", ")", "fobj", ".", "write", "(", "\"</td></tr>\\n\"", ")", "fobj", ".", "write", "(", "\"</table>\\n\"", ")", "fobj", ".", "write", "(", "self", ".", "footer", ")", "fobj", ".", "close", "(", ")" ]
Stores all registered handlers in a queue for processing
def fire ( self , * args , * * kw ) : result = [ ] with self . _hlock : handlers = self . handlers if self . threads == 0 : # same-thread execution - synchronized for k in handlers : # handler, memoize, timeout h , m , t = handlers [ k ] try : r = self . _memoize ( h , m , t , * args , * * kw ) result . append ( tuple ( r ) ) except : result . append ( ( False , self . _error ( sys . exc_info ( ) ) , h ) ) elif self . threads > 0 : # multi-thread execution - desynchronized if self.threads > 1 queue = Queue ( ) # result lock just in case [].append() is not # thread-safe in other Python implementations rlock = RLock ( ) def _execute ( * args , * * kw ) : """ Executes all handlers stored in the queue """ while True : try : item = queue . get ( ) if item is None : queue . task_done ( ) break # handler, memoize, timeout h , m , t = handlers [ item ] # call under active lock try : r = self . _memoize ( h , m , t , * args , * * kw ) if not self . asynch : with rlock : result . append ( tuple ( r ) ) except : if not self . asynch : with rlock : result . append ( ( False , self . _error ( sys . exc_info ( ) ) , h ) ) queue . task_done ( ) except Empty : # never triggered, just to be safe break if handlers : threads = self . _threads ( handlers = handlers ) for _ in range ( threads ) : t = Thread ( target = _execute , args = args , kwargs = kw ) t . daemon = True t . start ( ) for k in handlers : queue . put ( k ) if self . asynch : # main thread, no locking required h , _ , _ = handlers [ k ] result . append ( ( None , None , h ) ) for _ in range ( threads ) : queue . put ( None ) # stop each worker if not self . asynch : queue . join ( ) return tuple ( result ) or None
10,256
https://github.com/axel-events/axel/blob/08a663347ef21614b96f92f60f4de57a502db73c/axel/axel.py#L175-L247
[ "def", "native_libraries_verify", "(", ")", ":", "with", "open", "(", "BINARY_EXT_TEMPLATE", ",", "\"r\"", ")", "as", "file_obj", ":", "template", "=", "file_obj", ".", "read", "(", ")", "expected", "=", "template", ".", "format", "(", "revision", "=", "REVISION", ")", "with", "open", "(", "BINARY_EXT_FILE", ",", "\"r\"", ")", "as", "file_obj", ":", "contents", "=", "file_obj", ".", "read", "(", ")", "if", "contents", "!=", "expected", ":", "err_msg", "=", "\"\\n\"", "+", "get_diff", "(", "contents", ",", "expected", ",", "\"docs/python/binary-extension.rst.actual\"", ",", "\"docs/python/binary-extension.rst.expected\"", ",", ")", "raise", "ValueError", "(", "err_msg", ")", "else", ":", "print", "(", "\"docs/python/binary-extension.rst contents are as expected.\"", ")" ]
Discards all registered handlers and cached results
def clear ( self ) : with self . _hlock : self . handlers . clear ( ) with self . _mlock : self . memoize . clear ( )
10,257
https://github.com/axel-events/axel/blob/08a663347ef21614b96f92f60f4de57a502db73c/axel/axel.py#L254-L259
[ "def", "save", "(", "self", ",", "create_multiple_renditions", "=", "True", ",", "preserve_source_rendition", "=", "True", ",", "encode_to", "=", "enums", ".", "EncodeToEnum", ".", "FLV", ")", ":", "if", "is_ftp_connection", "(", "self", ".", "connection", ")", "and", "len", "(", "self", ".", "assets", ")", ">", "0", ":", "self", ".", "connection", ".", "post", "(", "xml", "=", "self", ".", "to_xml", "(", ")", ",", "assets", "=", "self", ".", "assets", ")", "elif", "not", "self", ".", "id", "and", "self", ".", "_filename", ":", "self", ".", "id", "=", "self", ".", "connection", ".", "post", "(", "'create_video'", ",", "self", ".", "_filename", ",", "create_multiple_renditions", "=", "create_multiple_renditions", ",", "preserve_source_rendition", "=", "preserve_source_rendition", ",", "encode_to", "=", "encode_to", ",", "video", "=", "self", ".", "_to_dict", "(", ")", ")", "elif", "not", "self", ".", "id", "and", "len", "(", "self", ".", "renditions", ")", ">", "0", ":", "self", ".", "id", "=", "self", ".", "connection", ".", "post", "(", "'create_video'", ",", "video", "=", "self", ".", "_to_dict", "(", ")", ")", "elif", "self", ".", "id", ":", "data", "=", "self", ".", "connection", ".", "post", "(", "'update_video'", ",", "video", "=", "self", ".", "_to_dict", "(", ")", ")", "if", "data", ":", "self", ".", "_load", "(", "data", ")" ]
Controls the time allocated for the execution of a method
def _timeout ( self , timeout , handler , * args , * * kw ) : t = spawn_thread ( target = handler , args = args , kw = kw ) t . daemon = True t . start ( ) t . join ( timeout ) if not t . is_alive ( ) : if t . exc_info : return t . exc_info return t . result else : try : msg = '[%s] Execution was forcefully terminated' raise RuntimeError ( msg % t . name ) except : return sys . exc_info ( )
10,258
https://github.com/axel-events/axel/blob/08a663347ef21614b96f92f60f4de57a502db73c/axel/axel.py#L336-L352
[ "def", "find_package_indexes_in_dir", "(", "self", ",", "simple_dir", ")", ":", "packages", "=", "sorted", "(", "{", "# Filter out all of the \"non\" normalized names here", "canonicalize_name", "(", "x", ")", "for", "x", "in", "os", ".", "listdir", "(", "simple_dir", ")", "}", ")", "# Package indexes must be in directories, so ignore anything else.", "packages", "=", "[", "x", "for", "x", "in", "packages", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "simple_dir", ",", "x", ")", ")", "]", "return", "packages" ]
Calculates maximum number of threads that will be started
def _threads ( self , handlers ) : if self . threads < len ( handlers ) : return self . threads return len ( handlers )
10,259
https://github.com/axel-events/axel/blob/08a663347ef21614b96f92f60f4de57a502db73c/axel/axel.py#L354-L358
[ "def", "GetAttachmentIdFromMediaId", "(", "media_id", ")", ":", "altchars", "=", "'+-'", "if", "not", "six", ".", "PY2", ":", "altchars", "=", "altchars", ".", "encode", "(", "'utf-8'", ")", "# altchars for '+' and '/'. We keep '+' but replace '/' with '-'", "buffer", "=", "base64", ".", "b64decode", "(", "str", "(", "media_id", ")", ",", "altchars", ")", "resoure_id_length", "=", "20", "attachment_id", "=", "''", "if", "len", "(", "buffer", ")", ">", "resoure_id_length", ":", "# We are cutting off the storage index.", "attachment_id", "=", "base64", ".", "b64encode", "(", "buffer", "[", "0", ":", "resoure_id_length", "]", ",", "altchars", ")", "if", "not", "six", ".", "PY2", ":", "attachment_id", "=", "attachment_id", ".", "decode", "(", "'utf-8'", ")", "else", ":", "attachment_id", "=", "media_id", "return", "attachment_id" ]
Read raw data and calculate temperature and humidity .
def update ( self ) : if not self . _ok : self . log_error ( "Trying to restore OK mode w/ soft reset" ) self . _ok = self . _soft_reset ( ) try : self . _bus . write_byte ( self . _i2c_add , CMD_READ_TEMP_NOHOLD ) sleep ( MEASUREMENT_WAIT_TIME ) buf_t = self . _bus . read_i2c_block_data ( self . _i2c_add , CMD_READ_TEMP_HOLD , 3 ) self . _bus . write_byte ( self . _i2c_add , CMD_READ_HUM_NOHOLD ) sleep ( MEASUREMENT_WAIT_TIME ) buf_h = self . _bus . read_i2c_block_data ( self . _i2c_add , CMD_READ_HUM_HOLD , 3 ) except OSError as exc : self . _ok = False self . log_error ( "Bad reading: %s" , exc ) return if self . _crc8check ( buf_t ) : temp = ( buf_t [ 0 ] << 8 | buf_t [ 1 ] ) & 0xFFFC self . _temperature = self . _calc_temp ( temp ) if self . _crc8check ( buf_h ) : humid = ( buf_h [ 0 ] << 8 | buf_h [ 1 ] ) & 0xFFFC rh_actual = self . _calc_humid ( humid ) # For temperature coefficient compensation rh_final = self . _temp_coefficient ( rh_actual , self . _temperature ) rh_final = 100.0 if rh_final > 100 else rh_final # Clamp > 100 rh_final = 0.0 if rh_final < 0 else rh_final # Clamp < 0 self . _humidity = rh_final else : self . _humidity = - 255 self . _ok = False self . log_error ( "Bad CRC error with humidity" ) else : self . _temperature = - 255 self . _ok = False self . log_error ( "Bad CRC error with temperature" )
10,260
https://github.com/azogue/i2csense/blob/ecc6806dcee9de827a5414a9e836d271fedca9b9/i2csense/htu21d.py#L87-L126
[ "def", "update_notebook_actions", "(", "self", ")", ":", "if", "self", ".", "recent_notebooks", ":", "self", ".", "clear_recent_notebooks_action", ".", "setEnabled", "(", "True", ")", "else", ":", "self", ".", "clear_recent_notebooks_action", ".", "setEnabled", "(", "False", ")", "client", "=", "self", ".", "get_current_client", "(", ")", "if", "client", ":", "if", "client", ".", "get_filename", "(", ")", "!=", "WELCOME", ":", "self", ".", "save_as_action", ".", "setEnabled", "(", "True", ")", "self", ".", "open_console_action", ".", "setEnabled", "(", "True", ")", "self", ".", "options_menu", ".", "clear", "(", ")", "add_actions", "(", "self", ".", "options_menu", ",", "self", ".", "menu_actions", ")", "return", "self", ".", "save_as_action", ".", "setEnabled", "(", "False", ")", "self", ".", "open_console_action", ".", "setEnabled", "(", "False", ")", "self", ".", "options_menu", ".", "clear", "(", ")", "add_actions", "(", "self", ".", "options_menu", ",", "self", ".", "menu_actions", ")" ]
Return workflow owner access token .
def get_owner_access_token ( self ) : from . database import Session db_session = Session . object_session ( self ) owner = db_session . query ( User ) . filter_by ( id_ = self . owner_id ) . first ( ) return owner . access_token
10,261
https://github.com/reanahub/reana-db/blob/4efcb46d23af035689964d8c25a804c5a8f1dfc3/reana_db/models.py#L167-L173
[ "def", "_mmUpdateDutyCycles", "(", "self", ")", ":", "period", "=", "self", ".", "getDutyCyclePeriod", "(", ")", "unionSDRArray", "=", "numpy", ".", "zeros", "(", "self", ".", "getNumColumns", "(", ")", ")", "unionSDRArray", "[", "list", "(", "self", ".", "_mmTraces", "[", "\"unionSDR\"", "]", ".", "data", "[", "-", "1", "]", ")", "]", "=", "1", "self", ".", "_mmData", "[", "\"unionSDRDutyCycle\"", "]", "=", "UnionTemporalPoolerMonitorMixin", ".", "_mmUpdateDutyCyclesHelper", "(", "self", ".", "_mmData", "[", "\"unionSDRDutyCycle\"", "]", ",", "unionSDRArray", ",", "period", ")", "self", ".", "_mmData", "[", "\"persistenceDutyCycle\"", "]", "=", "UnionTemporalPoolerMonitorMixin", ".", "_mmUpdateDutyCyclesHelper", "(", "self", ".", "_mmData", "[", "\"persistenceDutyCycle\"", "]", ",", "self", ".", "_poolingActivation", ",", "period", ")" ]
Update database workflow status .
def update_workflow_status ( db_session , workflow_uuid , status , new_logs = '' , message = None ) : try : workflow = db_session . query ( Workflow ) . filter_by ( id_ = workflow_uuid ) . first ( ) if not workflow : raise Exception ( 'Workflow {0} doesn\'t exist in database.' . format ( workflow_uuid ) ) if status : workflow . status = status if new_logs : workflow . logs = ( workflow . logs or '' ) + new_logs + '\n' db_session . commit ( ) except Exception as e : raise e
10,262
https://github.com/reanahub/reana-db/blob/4efcb46d23af035689964d8c25a804c5a8f1dfc3/reana_db/models.py#L176-L198
[ "def", "removefromreadergroup", "(", "self", ",", "groupname", ")", ":", "hresult", ",", "hcontext", "=", "SCardEstablishContext", "(", "SCARD_SCOPE_USER", ")", "if", "0", "!=", "hresult", ":", "raise", "EstablishContextException", "(", "hresult", ")", "try", ":", "hresult", "=", "SCardRemoveReaderFromGroup", "(", "hcontext", ",", "self", ".", "name", ",", "groupname", ")", "if", "0", "!=", "hresult", ":", "raise", "RemoveReaderFromGroupException", "(", "hresult", ",", "self", ".", "name", ",", "groupname", ")", "finally", ":", "hresult", "=", "SCardReleaseContext", "(", "hcontext", ")", "if", "0", "!=", "hresult", ":", "raise", "ReleaseContextException", "(", "hresult", ")" ]
Parse address and returns host and port
def parse_server_addr ( str_addr , default_port = 26000 ) : m = ADDR_STR_RE . match ( str_addr ) if m is None : raise ValueError ( 'Bad address string "{0}"' . format ( str_addr ) ) dct = m . groupdict ( ) port = dct . get ( 'port' ) if port is None : port = default_port else : port = int ( port ) # Caution: could raise ValueEror or TypeError if port == 0 : raise ValueError ( "Port can't be zero" ) host = dct [ 'host' ] if dct [ 'host' ] else dct [ 'host6' ] return host , port
10,263
https://github.com/bacher09/xrcon/blob/6a883f780265cbca31af7a379dc7cb28fdd8b73f/xrcon/utils.py#L103-L142
[ "def", "deploy_template", "(", "access_token", ",", "subscription_id", ",", "resource_group", ",", "deployment_name", ",", "template", ",", "parameters", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourcegroups/'", ",", "resource_group", ",", "'/providers/Microsoft.Resources/deployments/'", ",", "deployment_name", ",", "'?api-version='", ",", "DEPLOYMENTS_API", "]", ")", "properties", "=", "{", "'template'", ":", "template", "}", "properties", "[", "'mode'", "]", "=", "'Incremental'", "properties", "[", "'parameters'", "]", "=", "parameters", "template_body", "=", "{", "'properties'", ":", "properties", "}", "body", "=", "json", ".", "dumps", "(", "template_body", ")", "return", "do_put", "(", "endpoint", ",", "body", ",", "access_token", ")" ]
Request a go to assignment .
def request_goto ( self , tc = None ) : if not tc : tc = TextHelper ( self . editor ) . word_under_cursor ( select_whole_word = True ) if not self . _definition or isinstance ( self . sender ( ) , QAction ) : self . select_word ( tc ) if self . _definition is not None : QTimer . singleShot ( 100 , self . _goto_def )
10,264
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/modes/goto.py#L122-L137
[ "def", "get_directory_properties", "(", "self", ",", "share_name", ",", "directory_name", ",", "timeout", "=", "None", ",", "snapshot", "=", "None", ")", ":", "_validate_not_none", "(", "'share_name'", ",", "share_name", ")", "_validate_not_none", "(", "'directory_name'", ",", "directory_name", ")", "request", "=", "HTTPRequest", "(", ")", "request", ".", "method", "=", "'GET'", "request", ".", "host_locations", "=", "self", ".", "_get_host_locations", "(", ")", "request", ".", "path", "=", "_get_path", "(", "share_name", ",", "directory_name", ")", "request", ".", "query", "=", "{", "'restype'", ":", "'directory'", ",", "'timeout'", ":", "_int_to_str", "(", "timeout", ")", ",", "'sharesnapshot'", ":", "_to_str", "(", "snapshot", ")", "}", "return", "self", ".", "_perform_request", "(", "request", ",", "_parse_directory", ",", "[", "directory_name", "]", ")" ]
Return a copy of the template with the specified name . If not found or an error occurs during the load return None .
def get_template ( name ) : path = os . path . join ( base_dir , name ) if path not in templates : try : templates [ path ] = Template ( path ) except IOError : return None return copy . deepcopy ( templates [ path ] )
10,265
https://github.com/jasonrbriggs/proton/blob/e734734750797ef0caaa1680379e07b86d7a53e3/python/proton/template.py#L366-L379
[ "def", "__normalize", "(", "self", ")", ":", "# Don't normalize if we're already normalizing or intializing", "if", "self", ".", "__normalizing", "is", "True", "or", "self", ".", "__initialized", "is", "False", ":", "return", "self", ".", "__normalizing", "=", "True", "self", ".", "__normalize_grades", "(", ")", "self", ".", "__normalize_progress", "(", ")", "self", ".", "__normalizing", "=", "False" ]
Set the content of an xml element marked with the matching eid attribute .
def set_value ( self , eid , val , idx = '*' ) : if eid in self . __element_ids : elems = self . __element_ids [ eid ] if type ( val ) in SEQ_TYPES : idx = 0 if idx == '*' : for elem in elems : self . __set_value ( eid , elem , val , idx ) elif idx < len ( elems ) : self . __set_value ( eid , elems [ idx ] , val , idx )
10,266
https://github.com/jasonrbriggs/proton/blob/e734734750797ef0caaa1680379e07b86d7a53e3/python/proton/template.py#L220-L232
[ "def", "get_dummy_request", "(", "language", "=", "None", ")", ":", "if", "settings", ".", "ALLOWED_HOSTS", "and", "settings", ".", "ALLOWED_HOSTS", "!=", "\"*\"", ":", "host", "=", "settings", ".", "ALLOWED_HOSTS", "[", "0", "]", "else", ":", "host", "=", "Site", ".", "objects", ".", "get_current", "(", ")", ".", "domain", "request", "=", "RequestFactory", "(", ")", ".", "get", "(", "\"/\"", ",", "HTTP_HOST", "=", "host", ")", "request", ".", "session", "=", "{", "}", "request", ".", "LANGUAGE_CODE", "=", "language", "or", "settings", ".", "LANGUAGE_CODE", "# Needed for plugin rendering.", "request", ".", "current_page", "=", "None", "if", "'django.contrib.auth'", "in", "settings", ".", "INSTALLED_APPS", ":", "from", "django", ".", "contrib", ".", "auth", ".", "models", "import", "AnonymousUser", "request", ".", "user", "=", "AnonymousUser", "(", ")", "return", "request" ]
Set the value of an xml attribute marked with the matching aid attribute .
def set_attribute ( self , aid , attrib , val , idx = '*' ) : if aid in self . __attrib_ids : elems = self . __attrib_ids [ aid ] if idx == '*' : for elem in elems : self . __set_attribute ( elem , attrib , val ) elif idx < len ( elems ) : elem = elems [ idx ] self . __set_attribute ( elem , attrib , val )
10,267
https://github.com/jasonrbriggs/proton/blob/e734734750797ef0caaa1680379e07b86d7a53e3/python/proton/template.py#L244-L255
[ "def", "_copy_database_data_clientside", "(", "self", ",", "tables", ",", "source", ",", "destination", ")", ":", "# Retrieve database rows", "rows", "=", "self", ".", "get_database_rows", "(", "tables", ",", "source", ")", "# Retrieve database columns", "cols", "=", "self", ".", "get_database_columns", "(", "tables", ",", "source", ")", "# Validate rows and columns", "for", "r", "in", "list", "(", "rows", ".", "keys", "(", ")", ")", ":", "assert", "r", "in", "tables", "for", "c", "in", "list", "(", "cols", ".", "keys", "(", ")", ")", ":", "assert", "c", "in", "tables", "# Change database to destination", "self", ".", "change_db", "(", "destination", ")", "# Get insert queries", "insert_queries", "=", "self", ".", "_get_insert_commands", "(", "rows", ",", "cols", ")", "# Execute insert queries", "self", ".", "_execute_insert_commands", "(", "insert_queries", ")" ]
Hide the element with the matching eid . If no match look for an element with a matching rid .
def hide ( self , eid , index = 0 ) : elems = None if eid in self . __element_ids : elems = self . __element_ids [ eid ] elif eid in self . __repeat_ids : elems = self . __repeat_ids [ eid ] if elems and index < len ( elems ) : elem = elems [ index ] elem . parent . children . remove ( elem )
10,268
https://github.com/jasonrbriggs/proton/blob/e734734750797ef0caaa1680379e07b86d7a53e3/python/proton/template.py#L279-L291
[ "def", "create_api_client", "(", "api", "=", "'BatchV1'", ")", ":", "k8s_config", ".", "load_incluster_config", "(", ")", "api_configuration", "=", "client", ".", "Configuration", "(", ")", "api_configuration", ".", "verify_ssl", "=", "False", "if", "api", "==", "'extensions/v1beta1'", ":", "api_client", "=", "client", ".", "ExtensionsV1beta1Api", "(", ")", "elif", "api", "==", "'CoreV1'", ":", "api_client", "=", "client", ".", "CoreV1Api", "(", ")", "elif", "api", "==", "'StorageV1'", ":", "api_client", "=", "client", ".", "StorageV1Api", "(", ")", "else", ":", "api_client", "=", "client", ".", "BatchV1Api", "(", ")", "return", "api_client" ]
Repeat an xml element marked with the matching rid .
def repeat ( self , rid , count , index = 0 ) : elems = None if rid in self . __repeat_ids : elems = self . __repeat_ids [ rid ] elif rid in self . __element_ids : elems = self . __element_ids if elems and index < len ( elems ) : elem = elems [ index ] self . __repeat ( elem , count )
10,269
https://github.com/jasonrbriggs/proton/blob/e734734750797ef0caaa1680379e07b86d7a53e3/python/proton/template.py#L293-L305
[ "def", "_setup_conn_old", "(", "*", "*", "kwargs", ")", ":", "host", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.api_url'", ",", "'http://localhost:8080'", ")", "username", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.user'", ")", "password", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.password'", ")", "ca_cert", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.certificate-authority-data'", ")", "client_cert", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-certificate-data'", ")", "client_key", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-key-data'", ")", "ca_cert_file", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.certificate-authority-file'", ")", "client_cert_file", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-certificate-file'", ")", "client_key_file", "=", "__salt__", "[", "'config.option'", "]", "(", "'kubernetes.client-key-file'", ")", "# Override default API settings when settings are provided", "if", "'api_url'", "in", "kwargs", ":", "host", "=", "kwargs", ".", "get", "(", "'api_url'", ")", "if", "'api_user'", "in", "kwargs", ":", "username", "=", "kwargs", ".", "get", "(", "'api_user'", ")", "if", "'api_password'", "in", "kwargs", ":", "password", "=", "kwargs", ".", "get", "(", "'api_password'", ")", "if", "'api_certificate_authority_file'", "in", "kwargs", ":", "ca_cert_file", "=", "kwargs", ".", "get", "(", "'api_certificate_authority_file'", ")", "if", "'api_client_certificate_file'", "in", "kwargs", ":", "client_cert_file", "=", "kwargs", ".", "get", "(", "'api_client_certificate_file'", ")", "if", "'api_client_key_file'", "in", "kwargs", ":", "client_key_file", "=", "kwargs", ".", "get", "(", "'api_client_key_file'", ")", "if", "(", "kubernetes", ".", "client", ".", "configuration", ".", "host", "!=", "host", "or", "kubernetes", ".", "client", ".", "configuration", ".", "user", 
"!=", "username", "or", "kubernetes", ".", "client", ".", "configuration", ".", "password", "!=", "password", ")", ":", "# Recreates API connection if settings are changed", "kubernetes", ".", "client", ".", "configuration", ".", "__init__", "(", ")", "kubernetes", ".", "client", ".", "configuration", ".", "host", "=", "host", "kubernetes", ".", "client", ".", "configuration", ".", "user", "=", "username", "kubernetes", ".", "client", ".", "configuration", ".", "passwd", "=", "password", "if", "ca_cert_file", ":", "kubernetes", ".", "client", ".", "configuration", ".", "ssl_ca_cert", "=", "ca_cert_file", "elif", "ca_cert", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "prefix", "=", "'salt-kube-'", ",", "delete", "=", "False", ")", "as", "ca", ":", "ca", ".", "write", "(", "base64", ".", "b64decode", "(", "ca_cert", ")", ")", "kubernetes", ".", "client", ".", "configuration", ".", "ssl_ca_cert", "=", "ca", ".", "name", "else", ":", "kubernetes", ".", "client", ".", "configuration", ".", "ssl_ca_cert", "=", "None", "if", "client_cert_file", ":", "kubernetes", ".", "client", ".", "configuration", ".", "cert_file", "=", "client_cert_file", "elif", "client_cert", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "prefix", "=", "'salt-kube-'", ",", "delete", "=", "False", ")", "as", "c", ":", "c", ".", "write", "(", "base64", ".", "b64decode", "(", "client_cert", ")", ")", "kubernetes", ".", "client", ".", "configuration", ".", "cert_file", "=", "c", ".", "name", "else", ":", "kubernetes", ".", "client", ".", "configuration", ".", "cert_file", "=", "None", "if", "client_key_file", ":", "kubernetes", ".", "client", ".", "configuration", ".", "key_file", "=", "client_key_file", "elif", "client_key", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "prefix", "=", "'salt-kube-'", ",", "delete", "=", "False", ")", "as", "k", ":", "k", ".", "write", "(", "base64", ".", "b64decode", "(", "client_key", ")", ")", "kubernetes", ".", "client", ".", 
"configuration", ".", "key_file", "=", "k", ".", "name", "else", ":", "kubernetes", ".", "client", ".", "configuration", ".", "key_file", "=", "None", "return", "{", "}" ]
Replace an xml element marked with the matching eid . If the replacement value is an Element or TextElement it s swapped in untouched . If it s a Template the children of the root element in the template are used . Otherwise the replacement value is wrapped with a TextElement .
def replace ( self , eid , replacement , index = 0 ) : if eid in self . __element_ids : elems = self . __element_ids [ eid ] elif eid in self . __repeat_ids : elems = self . __repeat_ids [ eid ] else : return if index < len ( elems ) : elem = elems [ index ] current_pos = elem . parent . children . index ( elem ) elem . parent . children . remove ( elem ) replacement_type = type ( replacement ) if replacement_type in ( Element , TextElement ) : self . check_element ( replacement , True ) elem . parent . children . insert ( current_pos , replacement ) replacement . parent = elem . parent elif replacement_type == Template : for child in replacement . root . children : elem . parent . children . insert ( current_pos , child ) child . parent = elem . parent current_pos += 1 self . __merge_ids ( self . __element_ids , replacement . __element_ids ) self . __merge_ids ( self . __attrib_ids , replacement . __attrib_ids ) self . __merge_ids ( self . __repeat_ids , replacement . __repeat_ids ) else : elem . parent . children . insert ( current_pos , TextElement ( replacement ) )
10,270
https://github.com/jasonrbriggs/proton/blob/e734734750797ef0caaa1680379e07b86d7a53e3/python/proton/template.py#L316-L347
[ "def", "from_file", "(", "cls", ",", "h5_file", ")", ":", "return", "cls", "(", "{", "country", ":", "HDF5DailyBarReader", ".", "from_file", "(", "h5_file", ",", "country", ")", "for", "country", "in", "h5_file", ".", "keys", "(", ")", "}", ")" ]
Updates the hash algorithm and optionally the number of rounds to use .
def set_hasher ( self , hash , rounds = None ) : hash = hash . replace ( '-' , '_' ) if hash not in VALID_HASHERS : raise WrongHashAlgorithm ( WRONG_HASH_MESSAGE ) hasher = getattr ( ph , hash ) utils . test_hasher ( hasher ) default_rounds = getattr ( hasher , 'default_rounds' , 1 ) min_rounds = getattr ( hasher , 'min_rounds' , 1 ) max_rounds = getattr ( hasher , 'max_rounds' , float ( "inf" ) ) rounds = min ( max ( rounds or default_rounds , min_rounds ) , max_rounds ) op = { 'schemes' : VALID_HASHERS + DEPRECATED_HASHERS , 'deprecated' : DEPRECATED_HASHERS , 'default' : hash , hash + '__default_rounds' : rounds } self . hasher = CryptContext ( * * op ) self . hash = hash . replace ( '_' , '-' ) # For testing self . rounds = rounds
10,271
https://github.com/jpscaletti/authcode/blob/91529b6d0caec07d1452758d937e1e0745826139/authcode/auth.py#L140-L167
[ "def", "build_agency", "(", "pfeed", ")", ":", "return", "pd", ".", "DataFrame", "(", "{", "'agency_name'", ":", "pfeed", ".", "meta", "[", "'agency_name'", "]", ".", "iat", "[", "0", "]", ",", "'agency_url'", ":", "pfeed", ".", "meta", "[", "'agency_url'", "]", ".", "iat", "[", "0", "]", ",", "'agency_timezone'", ":", "pfeed", ".", "meta", "[", "'agency_timezone'", "]", ".", "iat", "[", "0", "]", ",", "}", ",", "index", "=", "[", "0", "]", ")" ]
Convert a string to a boolean value .
def to_bool ( value , do_raise = True ) : value = value . lower ( ) # Try it as an integer if value . isdigit ( ) : return bool ( int ( value ) ) # OK, check it against the true/false values... if value in _str_true : return True elif value in _str_false : return False # Not recognized if do_raise : raise ValueError ( "invalid literal for to_bool(): %r" % value ) return False
10,272
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/config.py#L228-L254
[ "def", "generate_context", "(", "force_overwrite", "=", "False", ",", "drop_secret_key", "=", "False", ")", ":", "print", "(", "'... generating context'", ")", "context_fp", "=", "'%s/context.json'", "%", "os", ".", "environ", "[", "'PRODUCT_DIR'", "]", "context", "=", "{", "}", "if", "os", ".", "path", ".", "isfile", "(", "context_fp", ")", ":", "print", "(", "'... augment existing context.json'", ")", "with", "open", "(", "context_fp", ",", "'r'", ")", "as", "context_f", ":", "content", "=", "context_f", ".", "read", "(", ")", ".", "strip", "(", ")", "or", "'{}'", "try", ":", "context", "=", "json", ".", "loads", "(", "content", ")", "except", "ValueError", ":", "print", "(", "'ERROR: not valid json in your existing context.json!!!'", ")", "return", "if", "force_overwrite", ":", "print", "(", "'... overwriting existing context.json'", ")", "if", "drop_secret_key", ":", "print", "(", "'... generating new SECRET_KEY'", ")", "context", "=", "{", "}", "else", ":", "print", "(", "'... using existing SECRET_KEY from existing context.json'", ")", "context", "=", "{", "'SECRET_KEY'", ":", "context", "[", "'SECRET_KEY'", "]", "}", "with", "open", "(", "context_fp", ",", "'w'", ")", "as", "context_f", ":", "new_context", "=", "tasks", ".", "get_context_template", "(", ")", "new_context", ".", "update", "(", "context", ")", "context_f", ".", "write", "(", "json", ".", "dumps", "(", "new_context", ",", "indent", "=", "4", ",", "sort_keys", "=", "True", ")", ")", "print", "(", ")", "print", "(", "'*** Successfully generated context.json'", ")" ]
Transform this layer into another layer type
async def become ( self , layer_type : Type [ L ] , request : 'Request' ) -> L : raise ValueError ( 'Cannot become "{}"' . format ( layer_type . __name__ ) )
10,273
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/layers/definitions.py#L75-L80
[ "def", "get_file_descriptor", "(", "self", ")", ":", "return", "self", ".", "_subscription", ".", "connection", "and", "self", ".", "_subscription", ".", "connection", ".", "_sock", ".", "fileno", "(", ")" ]
Transforms the translatable string into an actual string and put it inside a RawText .
async def become ( self , layer_type : Type [ L ] , request : 'Request' ) : if layer_type != RawText : super ( Text , self ) . become ( layer_type , request ) return RawText ( await render ( self . text , request ) )
10,274
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/layers/definitions.py#L110-L118
[ "def", "_validate_channel_definition", "(", "self", ",", "jp2h", ",", "colr", ")", ":", "cdef_lst", "=", "[", "j", "for", "(", "j", ",", "box", ")", "in", "enumerate", "(", "jp2h", ".", "box", ")", "if", "box", ".", "box_id", "==", "'cdef'", "]", "if", "len", "(", "cdef_lst", ")", ">", "1", ":", "msg", "=", "(", "\"Only one channel definition box is allowed in the \"", "\"JP2 header.\"", ")", "raise", "IOError", "(", "msg", ")", "elif", "len", "(", "cdef_lst", ")", "==", "1", ":", "cdef", "=", "jp2h", ".", "box", "[", "cdef_lst", "[", "0", "]", "]", "if", "colr", ".", "colorspace", "==", "core", ".", "SRGB", ":", "if", "any", "(", "[", "chan", "+", "1", "not", "in", "cdef", ".", "association", "or", "cdef", ".", "channel_type", "[", "chan", "]", "!=", "0", "for", "chan", "in", "[", "0", ",", "1", ",", "2", "]", "]", ")", ":", "msg", "=", "(", "\"All color channels must be defined in the \"", "\"channel definition box.\"", ")", "raise", "IOError", "(", "msg", ")", "elif", "colr", ".", "colorspace", "==", "core", ".", "GREYSCALE", ":", "if", "0", "not", "in", "cdef", ".", "channel_type", ":", "msg", "=", "(", "\"All color channels must be defined in the \"", "\"channel definition box.\"", ")", "raise", "IOError", "(", "msg", ")" ]
Make the register storage .
def _make_register ( self ) -> BaseRegisterStore : s = settings . REGISTER_STORE store_class = import_class ( s [ 'class' ] ) return store_class ( * * s [ 'params' ] )
10,275
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/fsm.py#L141-L148
[ "def", "get_attachment_content", "(", "self", ",", "ticket_id", ",", "attachment_id", ")", ":", "msg", "=", "self", ".", "__request", "(", "'ticket/{}/attachments/{}/content'", ".", "format", "(", "str", "(", "ticket_id", ")", ",", "str", "(", "attachment_id", ")", ")", ",", "text_response", "=", "False", ")", "lines", "=", "msg", ".", "split", "(", "b'\\n'", ",", "3", ")", "if", "(", "len", "(", "lines", ")", "==", "4", ")", "and", "(", "self", ".", "RE_PATTERNS", "[", "'invalid_attachment_pattern_bytes'", "]", ".", "match", "(", "lines", "[", "2", "]", ")", "or", "self", ".", "RE_PATTERNS", "[", "'does_not_exist_pattern_bytes'", "]", ".", "match", "(", "lines", "[", "2", "]", ")", ")", ":", "return", "None", "return", "msg", "[", "msg", ".", "find", "(", "b'\\n'", ")", "+", "2", ":", "-", "3", "]" ]
Load the transitions file .
def _make_transitions ( self ) -> List [ Transition ] : module_name = settings . TRANSITIONS_MODULE module_ = importlib . import_module ( module_name ) return module_ . transitions
10,276
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/fsm.py#L150-L157
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Sometimes we load states from the database . In order to avoid loading an arbitrary class we list here the state classes that are allowed .
def _make_allowed_states ( self ) -> Iterator [ Text ] : for trans in self . transitions : yield trans . dest . name ( ) if trans . origin : yield trans . origin . name ( )
10,277
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/fsm.py#L159-L169
[ "def", "set_descriptor", "(", "dev", ",", "desc", ",", "desc_type", ",", "desc_index", ",", "wIndex", "=", "None", ")", ":", "wValue", "=", "desc_index", "|", "(", "desc_type", "<<", "8", ")", "bmRequestType", "=", "util", ".", "build_request_type", "(", "util", ".", "CTRL_OUT", ",", "util", ".", "CTRL_TYPE_STANDARD", ",", "util", ".", "CTRL_RECIPIENT_DEVICE", ")", "dev", ".", "ctrl_transfer", "(", "bmRequestType", "=", "bmRequestType", ",", "bRequest", "=", "0x07", ",", "wValue", "=", "wValue", ",", "wIndex", "=", "wIndex", ",", "data_or_wLength", "=", "desc", ")" ]
Find the best trigger for this request or go away .
async def _find_trigger ( self , request : Request , origin : Optional [ Text ] = None , internal : bool = False ) -> Tuple [ Optional [ BaseTrigger ] , Optional [ Type [ BaseState ] ] , Optional [ bool ] , ] : reg = request . register if not origin : origin = reg . get ( Register . STATE ) logger . debug ( 'From state: %s' , origin ) results = await asyncio . gather ( * ( x . rank ( request , origin ) for x in self . transitions if x . internal == internal ) ) if len ( results ) : score , trigger , state , dnr = max ( results , key = lambda x : x [ 0 ] ) if score >= settings . MINIMAL_TRIGGER_SCORE : return trigger , state , dnr return None , None , None
10,278
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/fsm.py#L171-L203
[ "def", "_read_data", "(", "fname", ")", ":", "energy", "=", "[", "]", "with", "open", "(", "fname", ",", "'r'", ")", "as", "f", ":", "for", "line", "in", "f", ":", "CE", "=", "abs", "(", "float", "(", "line", ".", "strip", "(", ")", ".", "split", "(", ")", "[", "0", "]", ")", ")", "VASP", "=", "abs", "(", "float", "(", "line", ".", "strip", "(", ")", ".", "split", "(", ")", "[", "1", "]", ")", ")", "conc", "=", "[", "i", "for", "i", "in", "line", ".", "strip", "(", ")", ".", "split", "(", ")", "[", "2", ":", "]", "]", "conc_f", "=", "[", "]", "for", "c", "in", "conc", ":", "if", "'['", "in", "c", "and", "']'", "in", "c", ":", "conc_f", ".", "append", "(", "int", "(", "c", "[", "1", ":", "-", "1", "]", ")", ")", "elif", "'['", "in", "c", ":", "conc_f", ".", "append", "(", "int", "(", "c", "[", "1", ":", "-", "1", "]", ")", ")", "elif", "']'", "in", "c", "or", "','", "in", "c", ":", "conc_f", ".", "append", "(", "int", "(", "c", "[", ":", "-", "1", "]", ")", ")", "else", ":", "conc_f", ".", "append", "(", "int", "(", "c", ")", ")", "energy", ".", "append", "(", "[", "CE", ",", "VASP", ",", "conc_f", "]", ")", "return", "energy" ]
If we re confused find which state to call .
def _confused_state ( self , request : Request ) -> Type [ BaseState ] : origin = request . register . get ( Register . STATE ) if origin in self . _allowed_states : try : return import_class ( origin ) except ( AttributeError , ImportError ) : pass return import_class ( settings . DEFAULT_STATE )
10,279
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/fsm.py#L206-L219
[ "def", "_read_body_by_chunk", "(", "self", ",", "response", ",", "file", ",", "raw", "=", "False", ")", ":", "reader", "=", "ChunkedTransferReader", "(", "self", ".", "_connection", ")", "file_is_async", "=", "hasattr", "(", "file", ",", "'drain'", ")", "while", "True", ":", "chunk_size", ",", "data", "=", "yield", "from", "reader", ".", "read_chunk_header", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "data", ")", "if", "raw", ":", "file", ".", "write", "(", "data", ")", "if", "not", "chunk_size", ":", "break", "while", "True", ":", "content", ",", "data", "=", "yield", "from", "reader", ".", "read_chunk_body", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "data", ")", "if", "not", "content", ":", "if", "raw", ":", "file", ".", "write", "(", "data", ")", "break", "content", "=", "self", ".", "_decompress_data", "(", "content", ")", "if", "file", ":", "file", ".", "write", "(", "content", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "content", "=", "self", ".", "_flush_decompressor", "(", ")", "if", "file", ":", "file", ".", "write", "(", "content", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "trailer_data", "=", "yield", "from", "reader", ".", "read_trailer", "(", ")", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "trailer_data", ")", "if", "file", "and", "raw", ":", "file", ".", "write", "(", "trailer_data", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "response", ".", "fields", ".", "parse", "(", "trailer_data", ")" ]
Build the state for this request .
async def _build_state ( self , request : Request , message : BaseMessage , responder : Responder ) -> Tuple [ Optional [ BaseState ] , Optional [ BaseTrigger ] , Optional [ bool ] , ] : trigger , state_class , dnr = await self . _find_trigger ( request ) if trigger is None : if not message . should_confuse ( ) : return None , None , None state_class = self . _confused_state ( request ) logger . debug ( 'Next state: %s (confused)' , state_class . name ( ) ) else : logger . debug ( 'Next state: %s' , state_class . name ( ) ) state = state_class ( request , responder , trigger , trigger ) return state , trigger , dnr
10,280
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/fsm.py#L221-L245
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Execute the state or if execution fails handle it .
async def _run_state ( self , responder , state , trigger , request ) -> BaseState : user_trigger = trigger # noinspection PyBroadException try : if trigger : await state . handle ( ) else : await state . confused ( ) for i in range ( 0 , settings . MAX_INTERNAL_JUMPS + 1 ) : if i == settings . MAX_INTERNAL_JUMPS : raise MaxInternalJump ( ) trigger , state_class , dnr = await self . _find_trigger ( request , state . name ( ) , True ) if not trigger : break logger . debug ( 'Jumping to state: %s' , state_class . name ( ) ) state = state_class ( request , responder , trigger , user_trigger ) await state . handle ( ) except Exception : logger . exception ( 'Error while handling state "%s"' , state . name ( ) ) responder . clear ( ) reporter . report ( request , state . name ( ) ) await state . error ( ) return state
10,281
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/fsm.py#L247-L281
[ "def", "set_descriptor", "(", "dev", ",", "desc", ",", "desc_type", ",", "desc_index", ",", "wIndex", "=", "None", ")", ":", "wValue", "=", "desc_index", "|", "(", "desc_type", "<<", "8", ")", "bmRequestType", "=", "util", ".", "build_request_type", "(", "util", ".", "CTRL_OUT", ",", "util", ".", "CTRL_TYPE_STANDARD", ",", "util", ".", "CTRL_RECIPIENT_DEVICE", ")", "dev", ".", "ctrl_transfer", "(", "bmRequestType", "=", "bmRequestType", ",", "bRequest", "=", "0x07", ",", "wValue", "=", "wValue", ",", "wIndex", "=", "wIndex", ",", "data_or_wLength", "=", "desc", ")" ]
Build the next register to store .
async def _build_state_register ( self , state : BaseState , request : Request , responder : Responder ) -> Dict : return { Register . STATE : state . name ( ) , Register . TRANSITION : await responder . make_transition_register ( request ) , }
10,282
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/engine/fsm.py#L283-L299
[ "def", "files_comments_delete", "(", "self", ",", "*", ",", "file", ":", "str", ",", "id", ":", "str", ",", "*", "*", "kwargs", ")", "->", "SlackResponse", ":", "kwargs", ".", "update", "(", "{", "\"file\"", ":", "file", ",", "\"id\"", ":", "id", "}", ")", "return", "self", ".", "api_call", "(", "\"files.comments.delete\"", ",", "json", "=", "kwargs", ")" ]
runGetResults - Simple method to run a command and return the results of the execution as a dict .
def runGetResults ( cmd , stdout = True , stderr = True , encoding = sys . getdefaultencoding ( ) ) : if stderr in ( 'stdout' , subprocess . STDOUT ) : stderr = subprocess . STDOUT elif stderr == True or stderr == subprocess . PIPE : stderr = subprocess . PIPE else : stderr = None if stdout == True or stdout == subprocess . STDOUT : stdout = subprocess . PIPE else : stdout = None if stderr == subprocess . PIPE : raise ValueError ( 'Cannot redirect stderr to stdout if stdout is not captured.' ) if issubclass ( cmd . __class__ , ( list , tuple ) ) : shell = False else : shell = True try : pipe = subprocess . Popen ( cmd , stdout = stdout , stderr = stderr , shell = shell ) except Exception as e : try : if shell is True : cmdStr = ' ' . join ( cmd ) else : cmdStr = cmd except : cmdStr = repr ( cmd ) raise SimpleCommandFailure ( 'Failed to execute "%s": %s' % ( cmdStr , str ( e ) ) , returnCode = 255 ) streams = [ ] fileNoToKey = { } ret = { } if stdout == subprocess . PIPE : streams . append ( pipe . stdout ) fileNoToKey [ pipe . stdout . fileno ( ) ] = 'stdout' ret [ 'stdout' ] = [ ] if stderr == subprocess . PIPE : streams . append ( pipe . stderr ) fileNoToKey [ pipe . stderr . fileno ( ) ] = 'stderr' ret [ 'stderr' ] = [ ] returnCode = None time . sleep ( .02 ) while returnCode is None or streams : returnCode = pipe . poll ( ) while True : ( readyToRead , junk1 , junk2 ) = select . select ( streams , [ ] , [ ] , .005 ) if not readyToRead : # Don't strangle CPU time . sleep ( .01 ) break for readyStream in readyToRead : retKey = fileNoToKey [ readyStream . fileno ( ) ] curRead = readyStream . read ( ) if curRead in ( b'' , '' ) : streams . remove ( readyStream ) continue ret [ retKey ] . append ( curRead ) for key in list ( ret . keys ( ) ) : ret [ key ] = b'' . join ( ret [ key ] ) if encoding : ret [ key ] = ret [ key ] . decode ( encoding ) ret [ 'returnCode' ] = returnCode return ret
10,283
https://github.com/kata198/python-subprocess2/blob/8544b0b651d8e14de9fdd597baa704182e248b01/subprocess2/simple.py#L31-L144
[ "def", "build_synchronize_decorator", "(", ")", ":", "lock", "=", "threading", ".", "Lock", "(", ")", "def", "lock_decorator", "(", "fn", ")", ":", "@", "functools", ".", "wraps", "(", "fn", ")", "def", "lock_decorated", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "lock", ":", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "lock_decorated", "return", "lock_decorator" ]
Create a context store . By default using the default configured context store but you can use a custom class if you want to using the store setting .
def create_context_store ( name = 'default' , ttl = settings . CONTEXT_DEFAULT_TTL , store = settings . CONTEXT_STORE ) -> 'BaseContextStore' : store_class = import_class ( store [ 'class' ] ) return store_class ( name = name , ttl = ttl , * * store [ 'params' ] )
10,284
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/storage/context/base.py#L34-L68
[ "def", "handle_input", "(", "self", ")", ":", "difference", "=", "self", ".", "check_state", "(", ")", "if", "not", "difference", ":", "return", "self", ".", "events", "=", "[", "]", "self", ".", "handle_new_events", "(", "difference", ")", "self", ".", "update_timeval", "(", ")", "self", ".", "events", ".", "append", "(", "self", ".", "sync_marker", "(", "self", ".", "timeval", ")", ")", "self", ".", "write_to_pipe", "(", "self", ".", "events", ")" ]
Return text in camelCase style .
def camelcase ( text , acronyms = None ) : words , _case , _sep = case_parse . parse_case ( text , acronyms ) if words : words [ 0 ] = words [ 0 ] . lower ( ) return '' . join ( words )
10,285
https://github.com/AlejandroFrias/case-conversion/blob/79ebce1403fbdac949b2da21b8f6fbe3234ddb31/case_conversion/case_conversion.py#L13-L29
[ "def", "point_lm", "(", "self", ",", "context", ")", ":", "lm", "=", "np", ".", "empty", "(", "context", ".", "shape", ",", "context", ".", "dtype", ")", "# Print the array schema", "montblanc", ".", "log", ".", "info", "(", "context", ".", "array_schema", ".", "shape", ")", "# Print the space of iteration", "montblanc", ".", "log", ".", "info", "(", "context", ".", "iter_args", ")", "(", "ls", ",", "us", ")", "=", "context", ".", "dim_extents", "(", "'npsrc'", ")", "lm", "[", ":", ",", "0", "]", "=", "0.0008", "lm", "[", ":", ",", "1", "]", "=", "0.0036", "lm", "[", ":", ",", ":", "]", "=", "0", "return", "lm" ]
Return text in dot . case style .
def dotcase ( text , acronyms = None ) : words , _case , _sep = case_parse . parse_case ( text , acronyms ) return '.' . join ( [ w . lower ( ) for w in words ] )
10,286
https://github.com/AlejandroFrias/case-conversion/blob/79ebce1403fbdac949b2da21b8f6fbe3234ddb31/case_conversion/case_conversion.py#L148-L162
[ "def", "detach_storage", "(", "self", ",", "server", ",", "address", ")", ":", "body", "=", "{", "'storage_device'", ":", "{", "'address'", ":", "address", "}", "}", "url", "=", "'/server/{0}/storage/detach'", ".", "format", "(", "server", ")", "res", "=", "self", ".", "post_request", "(", "url", ",", "body", ")", "return", "Storage", ".", "_create_storage_objs", "(", "res", "[", "'server'", "]", "[", "'storage_devices'", "]", ",", "cloud_manager", "=", "self", ")" ]
Return text in seperate words style .
def separate_words ( text , acronyms = None ) : words , _case , _sep = case_parse . parse_case ( text , acronyms , preserve_case = True ) return ' ' . join ( words )
10,287
https://github.com/AlejandroFrias/case-conversion/blob/79ebce1403fbdac949b2da21b8f6fbe3234ddb31/case_conversion/case_conversion.py#L165-L179
[ "def", "_get_observed_mmax", "(", "catalogue", ",", "config", ")", ":", "if", "config", "[", "'input_mmax'", "]", ":", "obsmax", "=", "config", "[", "'input_mmax'", "]", "if", "config", "[", "'input_mmax_uncertainty'", "]", ":", "return", "config", "[", "'input_mmax'", "]", ",", "config", "[", "'input_mmax_uncertainty'", "]", "else", ":", "raise", "ValueError", "(", "'Input mmax uncertainty must be specified!'", ")", "max_location", "=", "np", ".", "argmax", "(", "catalogue", "[", "'magnitude'", "]", ")", "obsmax", "=", "catalogue", "[", "'magnitude'", "]", "[", "max_location", "]", "cond", "=", "isinstance", "(", "catalogue", "[", "'sigmaMagnitude'", "]", ",", "np", ".", "ndarray", ")", "and", "len", "(", "catalogue", "[", "'sigmaMagnitude'", "]", ")", ">", "0", "and", "not", "np", ".", "all", "(", "np", ".", "isnan", "(", "catalogue", "[", "'sigmaMagnitude'", "]", ")", ")", "if", "cond", ":", "if", "not", "np", ".", "isnan", "(", "catalogue", "[", "'sigmaMagnitude'", "]", "[", "max_location", "]", ")", ":", "return", "obsmax", ",", "catalogue", "[", "'sigmaMagnitude'", "]", "[", "max_location", "]", "else", ":", "print", "(", "'Uncertainty not given on observed Mmax\\n'", "'Taking largest magnitude uncertainty found in catalogue'", ")", "return", "obsmax", ",", "np", ".", "nanmax", "(", "catalogue", "[", "'sigmaMagnitude'", "]", ")", "elif", "config", "[", "'input_mmax_uncertainty'", "]", ":", "return", "obsmax", ",", "config", "[", "'input_mmax_uncertainty'", "]", "else", ":", "raise", "ValueError", "(", "'Input mmax uncertainty must be specified!'", ")" ]
Initialize the DB .
def init_db ( ) : import reana_db . models if not database_exists ( engine . url ) : create_database ( engine . url ) Base . metadata . create_all ( bind = engine )
10,288
https://github.com/reanahub/reana-db/blob/4efcb46d23af035689964d8c25a804c5a8f1dfc3/reana_db/database.py#L28-L33
[ "def", "_accountForNlinkEquals2", "(", "self", ",", "localFilePath", ")", ":", "fileStats", "=", "os", ".", "stat", "(", "localFilePath", ")", "assert", "fileStats", ".", "st_nlink", ">=", "self", ".", "nlinkThreshold", "with", "self", ".", "_CacheState", ".", "open", "(", "self", ")", "as", "cacheInfo", ":", "cacheInfo", ".", "sigmaJob", "-=", "fileStats", ".", "st_size", "jobState", "=", "self", ".", "_JobState", "(", "cacheInfo", ".", "jobState", "[", "self", ".", "jobID", "]", ")", "jobState", ".", "updateJobReqs", "(", "fileStats", ".", "st_size", ",", "'remove'", ")" ]
Read in the significant pathways file as a pandas . DataFrame .
def _load_significant_pathways_file ( path_to_file ) : feature_pathway_df = pd . read_table ( path_to_file , header = 0 , usecols = [ "feature" , "side" , "pathway" ] ) feature_pathway_df = feature_pathway_df . sort_values ( by = [ "feature" , "side" ] ) return feature_pathway_df
10,289
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L552-L561
[ "def", "remove_volume", "(", "self", ",", "volume_name", ")", ":", "logger", ".", "info", "(", "\"removing volume '%s'\"", ",", "volume_name", ")", "try", ":", "self", ".", "d", ".", "remove_volume", "(", "volume_name", ")", "except", "APIError", "as", "ex", ":", "if", "ex", ".", "response", ".", "status_code", "==", "requests", ".", "codes", ".", "CONFLICT", ":", "logger", ".", "debug", "(", "\"ignoring a conflict when removing volume %s\"", ",", "volume_name", ")", "else", ":", "raise", "ex" ]
Permute the pathways across features for one side in the network . Used in permute_pathways_across_features
def _pathway_feature_permutation ( pathway_feature_tuples , permutation_max_iters ) : pathways , features = [ list ( elements_at_position ) for elements_at_position in zip ( * pathway_feature_tuples ) ] original_pathways = pathways [ : ] random . shuffle ( pathways ) feature_block_locations = { } i = 0 while i < len ( pathways ) : starting_index = i current_feature = features [ i ] pathway_set = set ( ) # input is grouped by feature, so we want to keep track of the start # and end of a given "block" of the same feature--this corresponds # to all the pathways overrepresented in that feature. while i < len ( pathways ) and features [ i ] == current_feature : # check the results of the permutation. if `pathway_set` does # not contain the current pathway, we are maintaining the # necessary invariants in our permutation thus far. if pathways [ i ] not in pathway_set : pathway_set . add ( pathways [ i ] ) else : k = 0 random_pathway = None while True : # select another random pathway from the list # and get the feature to which it belongs j = random . choice ( range ( 0 , len ( pathways ) ) ) random_pathway = pathways [ j ] random_feature = features [ j ] if ( random_pathway != pathways [ i ] and random_pathway not in pathway_set ) : # if this is a feature we have not already seen, # we are done. if random_feature not in feature_block_locations : break # otherwise, look at the indices that correspond # to that feature's block of pathways feature_block_start , feature_block_end = feature_block_locations [ random_feature ] pathway_block = pathways [ feature_block_start : feature_block_end ] # make sure that the current pathway is not in # that block--ensures that we maintain the invariant # after the swap if pathways [ i ] not in pathway_block : break k += 1 if k > permutation_max_iters : print ( "Permutation step: reached the maximum " "number of iterations {0}." . format ( permutation_max_iters ) ) return None pathway_set . 
add ( random_pathway ) pathways [ j ] = pathways [ i ] pathways [ i ] = random_pathway i += 1 ending_index = i feature_block_locations [ current_feature ] = ( starting_index , ending_index ) if original_pathways == pathways : return None return list ( zip ( pathways , features ) )
10,290
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L568-L652
[ "def", "_check_registry_type", "(", "folder", "=", "None", ")", ":", "folder", "=", "_registry_folder", "(", "folder", ")", "default_file", "=", "os", ".", "path", ".", "join", "(", "folder", ",", "'registry_type.txt'", ")", "try", ":", "with", "open", "(", "default_file", ",", "\"r\"", ")", "as", "infile", ":", "data", "=", "infile", ".", "read", "(", ")", "data", "=", "data", ".", "strip", "(", ")", "ComponentRegistry", ".", "SetBackingStore", "(", "data", ")", "except", "IOError", ":", "pass" ]
Applied during the permutation test . Update the edges in the network to be weighted by their odds ratios . The odds ratio measures how unexpected the observed edge weight is based on the expected weight .
def weight_by_edge_odds_ratios ( self , edges_expected_weight , flag_as_significant ) : for edge_id , expected_weight in edges_expected_weight : edge_obj = self . edges [ edge_id ] edge_obj . weight /= expected_weight if edge_id in flag_as_significant : edge_obj . significant = True else : edge_obj . significant = False
10,291
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L223-L247
[ "def", "list_objects", "(", "self", ",", "resources", ")", ":", "result", "=", "[", "]", "for", "res", "in", "resources", ":", "result", ".", "append", "(", "'\\t'", ".", "join", "(", "[", "res", ".", "identifier", ",", "res", ".", "name", ",", "str", "(", "res", ".", "timestamp", ")", "[", ":", "19", "]", "]", ")", ")", "return", "result" ]
Combine this network with another network . The aggregation step takes the union of the edges in the two networks where we take the sum of weights for edges common to both networks .
def aggregate ( self , merge ) : self . features = set ( ) self . n_features += merge . n_features vertex_id_conversion = self . convert_pathway_mapping ( merge . pathways ) for edge_id , edge in merge . edges . items ( ) : edge_key = self . remapped_edge ( vertex_id_conversion , edge_id ) if edge_key in self . edges : if self . edges [ edge_key ] . which_features : self . edges [ edge_key ] . which_features = [ ] self . edges [ edge_key ] . weight += edge . weight else : vertex0_id , vertex1_id = edge_key new_edge_obj = Edge ( vertex0_id , vertex1_id , [ ] ) new_edge_obj . weight = edge . weight self . edges [ edge_key ] = new_edge_obj self . _add_edge_to_vertex ( vertex0_id , new_edge_obj ) self . _add_edge_to_vertex ( vertex1_id , new_edge_obj )
10,292
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L249-L276
[ "def", "_check_docstring_quotes", "(", "self", ",", "quote_record", ")", ":", "_", ",", "triple", ",", "row", ",", "col", "=", "quote_record", "if", "triple", "!=", "TRIPLE_QUOTE_OPTS", ".", "get", "(", "self", ".", "config", ".", "docstring_quote", ")", ":", "self", ".", "_invalid_docstring_quote", "(", "triple", ",", "row", ",", "col", ")" ]
To avoid duplicate edges where the vertex ids are reversed we maintain that the vertex ids are ordered so that the corresponding pathway names are alphabetical .
def edge_tuple ( self , vertex0_id , vertex1_id ) : pw0 = self . __getitem__ ( vertex0_id ) pw1 = self . __getitem__ ( vertex1_id ) if not pw0 or not pw1 : return None if pw0 < pw1 : return ( vertex0_id , vertex1_id ) elif pw0 > pw1 : return ( vertex1_id , vertex0_id ) else : return None
10,293
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L332-L360
[ "def", "set_samplerate", "(", "self", ",", "rate", ")", ":", "# Test and set value:", "float", "(", "rate", ")", "self", ".", "conconf", ".", "set_condition", "(", "'samplerate'", ",", "rate", ")", "if", "not", "self", ".", "no_auto", ":", "self", ".", "make_mask", "(", ")" ]
Updates self . pathways and self . n_pathways .
def add_pathway ( self , pathway ) : if pathway not in self . pathways : self . pathways [ pathway ] = self . n_pathways self . n_pathways += 1 return self . pathways [ pathway ]
10,294
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L362-L373
[ "def", "make_random_models_table", "(", "n_sources", ",", "param_ranges", ",", "random_state", "=", "None", ")", ":", "prng", "=", "check_random_state", "(", "random_state", ")", "sources", "=", "Table", "(", ")", "for", "param_name", ",", "(", "lower", ",", "upper", ")", "in", "param_ranges", ".", "items", "(", ")", ":", "# Generate a column for every item in param_ranges, even if it", "# is not in the model (e.g. flux). However, such columns will", "# be ignored when rendering the image.", "sources", "[", "param_name", "]", "=", "prng", ".", "uniform", "(", "lower", ",", "upper", ",", "n_sources", ")", "return", "sources" ]
Get the pathways associated with an edge .
def get_edge_pathways ( self , edge_id ) : vertex0_id , vertex1_id = edge_id pw0 = self . get_pathway_from_vertex_id ( vertex0_id ) pw1 = self . get_pathway_from_vertex_id ( vertex1_id ) if not pw0 or not pw1 : return None return ( pw0 , pw1 )
10,295
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L388-L405
[ "def", "static_cdn_url", "(", "request", ")", ":", "cdn_url", ",", "ssl_url", "=", "_get_container_urls", "(", "CumulusStaticStorage", "(", ")", ")", "static_url", "=", "settings", ".", "STATIC_URL", "return", "{", "\"STATIC_URL\"", ":", "cdn_url", "+", "static_url", ",", "\"STATIC_SSL_URL\"", ":", "ssl_url", "+", "static_url", ",", "\"LOCAL_STATIC_URL\"", ":", "static_url", ",", "}" ]
Get the vertex object that corresponds to a pathway name
def get_vertex_obj_from_pathway ( self , pathway ) : if pathway in self . pathways : vertex_id = self . pathways [ pathway ] return self . vertices [ vertex_id ] else : return None
10,296
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L422-L437
[ "def", "get_resource_id", "(", "self", ")", ":", "# Implemented from template for osid.resource.Resource.get_avatar_id_template", "if", "not", "bool", "(", "self", ".", "_my_map", "[", "'resourceId'", "]", ")", ":", "raise", "errors", ".", "IllegalState", "(", "'this Authorization has no resource'", ")", "else", ":", "return", "Id", "(", "self", ".", "_my_map", "[", "'resourceId'", "]", ")" ]
Get the pathways adjacent to this pathway in the network
def get_adjacent_pathways ( self , pathway ) : vertex_id = self . pathways [ pathway ] adjacent = self . vertices [ vertex_id ] . get_adjacent_vertex_ids ( ) adjacent_pathways = [ ] for adjacent_id in adjacent : adjacent_pathways . append ( self . get_pathway_from_vertex_id ( adjacent_id ) ) return adjacent_pathways
10,297
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L439-L456
[ "def", "calc_regenerated", "(", "self", ",", "lastvotetime", ")", ":", "delta", "=", "datetime", ".", "utcnow", "(", ")", "-", "datetime", ".", "strptime", "(", "lastvotetime", ",", "'%Y-%m-%dT%H:%M:%S'", ")", "td", "=", "delta", ".", "days", "ts", "=", "delta", ".", "seconds", "tt", "=", "(", "td", "*", "86400", ")", "+", "ts", "return", "tt", "*", "10000", "/", "86400", "/", "5" ]
Conversion of the network to a pandas . DataFrame .
def to_dataframe ( self , drop_weights_below = 0 , whitelist = None ) : network_df_cols = [ "pw0" , "pw1" , "weight" ] if self . features : network_df_cols . append ( "features" ) network_df = pd . DataFrame ( columns = network_df_cols ) idx = 0 edge_pathways = set ( ) for ( v0 , v1 ) , edge_obj in self . edges . items ( ) : if ( edge_obj . weight > drop_weights_below and ( whitelist is None or ( v0 , v1 ) in whitelist ) ) : row = [ self . __getitem__ ( v0 ) , self . __getitem__ ( v1 ) , edge_obj . weight ] edge_pathways . add ( v0 ) edge_pathways . add ( v1 ) if self . features : features = edge_obj . features_to_string ( ) row . append ( features ) network_df . loc [ idx ] = row idx += 1 # faster to append by index. network_df = network_df . sort_values ( by = [ "weight" ] , ascending = False ) print ( "The pathway co-occurrence network " "contains {0} pathways." . format ( len ( edge_pathways ) ) ) return network_df
10,298
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L458-L501
[ "def", "dump", "(", "self", ")", ":", "assert", "self", ".", "database", "is", "not", "None", "cmd", "=", "\"SELECT count from {} WHERE rowid={}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "STATE_INFO_ROW", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "self", ".", "_from_sqlite", "(", "ret", "[", "0", "]", "[", "0", "]", ")", "+", "self", ".", "inserts", "if", "count", ">", "self", ".", "row_limit", ":", "msg", "=", "\"cleaning up state, this might take a while.\"", "logger", ".", "warning", "(", "msg", ")", "delete", "=", "count", "-", "self", ".", "row_limit", "delete", "+=", "int", "(", "self", ".", "row_limit", "*", "(", "self", ".", "row_cleanup_quota", "/", "100.0", ")", ")", "cmd", "=", "(", "\"DELETE FROM {} WHERE timestamp IN (\"", "\"SELECT timestamp FROM {} ORDER BY timestamp ASC LIMIT {});\"", ")", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ",", "self", ".", "STATE_TABLE", ",", "delete", ")", ")", "self", ".", "_vacuum", "(", ")", "cmd", "=", "\"SELECT COUNT(*) FROM {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "ret", "[", "0", "]", "[", "0", "]", "cmd", "=", "\"UPDATE {} SET count = {} WHERE rowid = {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "_to_sqlite", "(", "count", ")", ",", "self", ".", "STATE_INFO_ROW", ",", ")", ")", "self", ".", "_update_cache_directory_state", "(", ")", "self", ".", "database", ".", "commit", "(", ")", "self", ".", "cursor", ".", "close", "(", ")", "self", ".", "database", ".", "close", "(", ")", 
"self", ".", "database", "=", "None", "self", ".", "cursor", "=", "None", "self", ".", "inserts", "=", "0" ]
Adds the edge to the Vertex object s edges dictionary
def _add_edge_to_vertex ( self , vertex_id , edge ) : connected_to = edge . connected_to ( vertex_id ) if vertex_id not in self . vertices : vertex_obj = Vertex ( vertex_id ) self . vertices [ vertex_id ] = vertex_obj self . vertices [ vertex_id ] . edges [ connected_to ] = edge . weight
10,299
https://github.com/greenelab/PathCORE-T/blob/9d079d5ebffea2fe9fb9ab557588d51ad67d2c9c/pathcore/network.py#L503-L510
[ "def", "read_rawFilesTable", "(", "filename", ")", ":", "exp", "=", "pd", ".", "read_table", "(", "filename", ")", "expected_columns", "=", "{", "'File'", ",", "'Exists'", ",", "'Size'", ",", "'Data format'", ",", "'Parameter group'", ",", "'Experiment'", ",", "'Fraction'", "}", "found_columns", "=", "set", "(", "exp", ".", "columns", ")", "if", "len", "(", "expected_columns", "-", "found_columns", ")", ">", "0", ":", "message", "=", "'\\n'", ".", "join", "(", "[", "'The raw files table has the wrong format!'", ",", "'It should contain columns:'", ",", "', '", ".", "join", "(", "sorted", "(", "expected_columns", ")", ")", ",", "'Found columns:'", ",", "', '", ".", "join", "(", "sorted", "(", "found_columns", ")", ")", "]", ")", "raise", "ValueError", "(", "message", ")", "exp", "[", "'Raw file'", "]", "=", "exp", "[", "'File'", "]", ".", "apply", "(", "path", ".", "basename", ")", ".", "apply", "(", "path", ".", "splitext", ")", ".", "str", ".", "get", "(", "0", ")", "exp", "[", "'Experiment'", "]", "=", "exp", "[", "'Experiment'", "]", ".", "astype", "(", "str", ")", "return", "exp" ]