query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Adds the user for this loan to a 'user' field.
def setUser(self, *args, **kwargs):
    """Adds the user for this loan to a 'user' field.

    Returns 1, the number of requests done to Mambu.

    Raises a MambuError (with its ``noUser`` attribute set to True)
    when the account has no 'assignedUserKey' field.
    """
    # Lazily resolve the MambuUser class the first time it is needed;
    # the deferred import avoids a circular import at module load time.
    # (The original duplicated the whole request/error body inside the
    # AttributeError handler; resolving the class up front removes the
    # duplication while preserving the same outcomes.)
    try:
        self.mambuuserclass
    except AttributeError:
        from .mambuuser import MambuUser
        self.mambuuserclass = MambuUser

    try:
        user = self.mambuuserclass(entid=self['assignedUserKey'],
                                   *args, **kwargs)
    except KeyError:
        # No user is assigned to this account
        err = MambuError(
            "La cuenta %s no tiene asignado un usuario" % self['id'])
        err.noUser = True
        raise err

    self['user'] = user
    return 1
10,600
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuloan.py#L183-L208
[ "def", "get_published", "(", "name", ",", "config_path", "=", "_DEFAULT_CONFIG_PATH", ",", "endpoint", "=", "''", ",", "prefix", "=", "None", ")", ":", "_validate_config", "(", "config_path", ")", "ret", "=", "dict", "(", ")", "sources", "=", "list", "(", ")", "cmd", "=", "[", "'publish'", ",", "'show'", ",", "'-config={}'", ".", "format", "(", "config_path", ")", ",", "name", "]", "if", "prefix", ":", "cmd", ".", "append", "(", "'{}:{}'", ".", "format", "(", "endpoint", ",", "prefix", ")", ")", "cmd_ret", "=", "_cmd_run", "(", "cmd", ")", "ret", "=", "_parse_show_output", "(", "cmd_ret", "=", "cmd_ret", ")", "if", "ret", ":", "log", ".", "debug", "(", "'Found published repository: %s'", ",", "name", ")", "else", ":", "log", ".", "debug", "(", "'Unable to find published repository: %s'", ",", "name", ")", "return", "ret" ]
Adds the product for this loan to a 'product' field.
def setProduct(self, cache=False, *args, **kwargs):
    """Adds the product for this loan to a 'product' field.

    When ``cache`` is True, the full product list is retrieved via
    AllMambuProducts and the matching product is searched there;
    otherwise the product is requested individually from Mambu.

    Returns the number of requests done to Mambu: 0 when the answer
    came from an already-initialized cache, 1 otherwise.
    """
    # Lazy class resolution via deferred import avoids a circular
    # import at module load time. (The original duplicated the
    # constructor call inside the AttributeError handler; resolving
    # the class first removes the duplication.)
    if cache:
        try:
            self.allmambuproductsclass
        except AttributeError:
            from .mambuproduct import AllMambuProducts
            self.allmambuproductsclass = AllMambuProducts

        prods = self.allmambuproductsclass(*args, **kwargs)
        for prod in prods:
            if prod['encodedKey'] == self['productTypeKey']:
                self['product'] = prod
        try:
            # asked for cache, but cache was originally empty
            prods.noinit
        except AttributeError:
            return 1
        return 0

    try:
        self.mambuproductclass
    except AttributeError:
        from .mambuproduct import MambuProduct
        self.mambuproductclass = MambuProduct

    product = self.mambuproductclass(entid=self['productTypeKey'],
                                     *args, **kwargs)
    self['product'] = product
    return 1
10,601
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuloan.py#L211-L247
[ "def", "invoke_common_options", "(", "f", ")", ":", "invoke_options", "=", "[", "template_click_option", "(", ")", ",", "click", ".", "option", "(", "'--env-vars'", ",", "'-n'", ",", "type", "=", "click", ".", "Path", "(", "exists", "=", "True", ")", ",", "help", "=", "\"JSON file containing values for Lambda function's environment variables.\"", ")", ",", "parameter_override_click_option", "(", ")", ",", "click", ".", "option", "(", "'--debug-port'", ",", "'-d'", ",", "help", "=", "\"When specified, Lambda function container will start in debug mode and will expose this \"", "\"port on localhost.\"", ",", "envvar", "=", "\"SAM_DEBUG_PORT\"", ")", ",", "click", ".", "option", "(", "'--debugger-path'", ",", "help", "=", "\"Host path to a debugger that will be mounted into the Lambda container.\"", ")", ",", "click", ".", "option", "(", "'--debug-args'", ",", "help", "=", "\"Additional arguments to be passed to the debugger.\"", ",", "envvar", "=", "\"DEBUGGER_ARGS\"", ")", ",", "click", ".", "option", "(", "'--docker-volume-basedir'", ",", "'-v'", ",", "envvar", "=", "\"SAM_DOCKER_VOLUME_BASEDIR\"", ",", "help", "=", "\"Specifies the location basedir where the SAM file exists. 
If the Docker is running on \"", "\"a remote machine, you must mount the path where the SAM file exists on the docker machine \"", "\"and modify this value to match the remote machine.\"", ")", ",", "click", ".", "option", "(", "'--log-file'", ",", "'-l'", ",", "help", "=", "\"logfile to send runtime logs to.\"", ")", ",", "click", ".", "option", "(", "'--layer-cache-basedir'", ",", "type", "=", "click", ".", "Path", "(", "exists", "=", "False", ",", "file_okay", "=", "False", ")", ",", "envvar", "=", "\"SAM_LAYER_CACHE_BASEDIR\"", ",", "help", "=", "\"Specifies the location basedir where the Layers your template uses will be downloaded to.\"", ",", "default", "=", "get_default_layer_cache_dir", "(", ")", ")", ",", "]", "+", "docker_click_options", "(", ")", "+", "[", "click", ".", "option", "(", "'--force-image-build'", ",", "is_flag", "=", "True", ",", "help", "=", "'Specify whether CLI should rebuild the image used for invoking functions with layers.'", ",", "envvar", "=", "'SAM_FORCE_IMAGE_BUILD'", ",", "default", "=", "False", ")", ",", "]", "# Reverse the list to maintain ordering of options in help text printed with --help", "for", "option", "in", "reversed", "(", "invoke_options", ")", ":", "option", "(", "f", ")", "return", "f" ]
Gets the loan details for every client holder of the account.
def getClientDetails(self, *args, **kwargs):
    """Gets the loan details for every client holder of the account.

    Expects a 'holder' keyword argument carrying a 'clients' list.
    Returns one dict per client with its id, name, the client object
    itself, and this account's loan amount.
    """
    holder = kwargs['holder']
    amount = self['loanAmount']
    return [
        {
            'id': client['id'],
            'name': client['name'],
            'client': client,
            'amount': amount,
        }
        for client in holder['clients']
    ]
10,602
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuloan.py#L412-L444
[ "def", "if_sqlserver_disable_constraints_triggers", "(", "session", ":", "SqlASession", ",", "tablename", ":", "str", ")", "->", "None", ":", "with", "if_sqlserver_disable_constraints", "(", "session", ",", "tablename", ")", ":", "with", "if_sqlserver_disable_triggers", "(", "session", ",", "tablename", ")", ":", "yield" ]
Convert keyword to fields .
def get_fields_for_keyword(self, keyword, mode='a'):
    """Convert keyword to fields.

    Looks the keyword up in ``self.keyword_to_fields``; a dict entry
    is resolved through ``mode``, a list/tuple entry is returned
    as-is, and anything else (including an unmapped keyword) is
    wrapped in a single-element list.
    """
    mapped = self.keyword_to_fields.get(keyword, keyword)
    if isinstance(mapped, dict):
        return mapped[mode]
    return mapped if isinstance(mapped, (list, tuple)) else [mapped]
10,603
https://github.com/inveniosoftware/invenio-query-parser/blob/21a2c36318003ff52d2e18e7196bb420db8ecb4b/invenio_query_parser/contrib/elasticsearch/walkers/dsl.py#L46-L53
[ "def", "gc_velocity_update", "(", "particle", ",", "social", ",", "state", ")", ":", "gbest", "=", "state", ".", "swarm", "[", "gbest_idx", "(", "state", ".", "swarm", ")", "]", ".", "position", "if", "not", "np", ".", "array_equal", "(", "gbest", ",", "particle", ".", "position", ")", ":", "return", "std_velocity", "(", "particle", ",", "social", ",", "state", ")", "rho", "=", "state", ".", "params", "[", "'rho'", "]", "inertia", "=", "state", ".", "params", "[", "'inertia'", "]", "v_max", "=", "state", ".", "params", "[", "'v_max'", "]", "size", "=", "particle", ".", "position", ".", "size", "r2", "=", "state", ".", "rng", ".", "uniform", "(", "0.0", ",", "1.0", ",", "size", ")", "velocity", "=", "__gc_velocity_equation__", "(", "inertia", ",", "rho", ",", "r2", ",", "particle", ",", "gbest", ")", "return", "__clamp__", "(", "velocity", ",", "v_max", ")" ]
Merge dict b into a
def merge_dict(a, b, path=None):
    """Merge dict ``b`` into ``a`` (in place) and return ``a``.

    Nested dicts are merged recursively; when both sides hold a
    non-dict value for the same key, the value already in ``a`` wins.
    """
    path = path or []
    for key, value in b.items():
        if key not in a:
            a[key] = value
        elif isinstance(a[key], dict) and isinstance(value, dict):
            merge_dict(a[key], value, path + [str(key)])
        # else: conflicting non-dict values -- keep a's value
    return a
10,604
https://github.com/pyslackers/sir-bot-a-lot/blob/22dfdd6a14d61dbe29423fd131b7a23e618b68d7/sirbot/utils/__init__.py#L8-L22
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
A flexible method to get a date object .
def make_date(obj: Union[date, datetime, Text], timezone: tzinfo = None):
    """A flexible method to get a date object.

    Accepts a datetime (optionally converted to ``timezone`` first),
    a plain date, or a string parsed through ``parse_date``.
    """
    if isinstance(obj, datetime):
        # Shift into the requested timezone before truncating to a date
        if timezone and hasattr(obj, 'astimezone'):
            obj = obj.astimezone(timezone)
        return obj.date()
    if isinstance(obj, date):
        return obj
    if isinstance(obj, str):
        return make_date(parse_date(obj), timezone)
10,605
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/_formatter.py#L22-L40
[ "def", "_unregister_bundle_factories", "(", "self", ",", "bundle", ")", ":", "# type: (Bundle) -> None", "with", "self", ".", "__factories_lock", ":", "# Find out which factories must be removed", "to_remove", "=", "[", "factory_name", "for", "factory_name", "in", "self", ".", "__factories", "if", "self", ".", "get_factory_bundle", "(", "factory_name", ")", "is", "bundle", "]", "# Remove all of them", "for", "factory_name", "in", "to_remove", ":", "try", ":", "self", ".", "unregister_factory", "(", "factory_name", ")", "except", "ValueError", "as", "ex", ":", "_logger", ".", "warning", "(", "\"Error unregistering factory '%s': %s\"", ",", "factory_name", ",", "ex", ")" ]
Format the date using Babel
def format_date(self, value, format_):
    """Format the date using Babel, in this formatter's locale."""
    return dates.format_date(make_date(value), format_, locale=self.lang)
10,606
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/_formatter.py#L80-L85
[ "def", "fixpairs", "(", "args", ")", ":", "p", "=", "OptionParser", "(", "fixpairs", ".", "__doc__", ")", "opts", ",", "args", "=", "p", ".", "parse_args", "(", "args", ")", "if", "len", "(", "args", ")", "!=", "3", ":", "sys", ".", "exit", "(", "not", "p", ".", "print_help", "(", ")", ")", "pairsfile", ",", "sep", ",", "sd", "=", "args", "newpairsfile", "=", "pairsfile", ".", "rsplit", "(", "\".\"", ",", "1", ")", "[", "0", "]", "+", "\".new.pairs\"", "sep", "=", "int", "(", "sep", ")", "sd", "=", "int", "(", "sd", ")", "p", "=", "PairsFile", "(", "pairsfile", ")", "p", ".", "fixLibraryStats", "(", "sep", ",", "sd", ")", "p", ".", "write", "(", "newpairsfile", ")" ]
Format the datetime using Babel
def format_datetime(self, value, format_):
    """Format the datetime using Babel, in this formatter's locale."""
    return dates.format_datetime(
        make_datetime(value), format_, locale=self.lang)
10,607
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/_formatter.py#L87-L92
[ "def", "_init_itemid2name", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ".", "args", ",", "'id2sym'", ")", ":", "return", "None", "fin_id2sym", "=", "self", ".", "args", ".", "id2sym", "if", "fin_id2sym", "is", "not", "None", "and", "os", ".", "path", ".", "exists", "(", "fin_id2sym", ")", ":", "id2sym", "=", "{", "}", "cmpl", "=", "re", ".", "compile", "(", "r'^\\s*(\\S+)[\\s,;]+(\\S+)'", ")", "with", "open", "(", "fin_id2sym", ")", "as", "ifstrm", ":", "for", "line", "in", "ifstrm", ":", "mtch", "=", "cmpl", ".", "search", "(", "line", ")", "if", "mtch", ":", "id2sym", "[", "mtch", ".", "group", "(", "1", ")", "]", "=", "mtch", ".", "group", "(", "2", ")", "return", "id2sym" ]
Provide the additional formatters for localization .
def format_field(self, value, spec):
    """Provide the additional formatters for localization.

    Recognizes 'date:<format>', 'datetime:<format>' and 'number'
    specs; any other spec is delegated to the parent formatter.
    """
    if spec.startswith('date:'):
        return self.format_date(value, spec.split(':', 1)[1])
    if spec.startswith('datetime:'):
        return self.format_datetime(value, spec.split(':', 1)[1])
    if spec == 'number':
        return self.format_number(value)
    return super(I18nFormatter, self).format_field(value, spec)
10,608
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/i18n/_formatter.py#L100-L114
[ "def", "_read_loop_websocket", "(", "self", ")", ":", "while", "self", ".", "state", "==", "'connected'", ":", "p", "=", "None", "try", ":", "p", "=", "self", ".", "ws", ".", "recv", "(", ")", "except", "websocket", ".", "WebSocketConnectionClosedException", ":", "self", ".", "logger", ".", "warning", "(", "'WebSocket connection was closed, aborting'", ")", "self", ".", "queue", ".", "put", "(", "None", ")", "break", "except", "Exception", "as", "e", ":", "self", ".", "logger", ".", "info", "(", "'Unexpected error \"%s\", aborting'", ",", "str", "(", "e", ")", ")", "self", ".", "queue", ".", "put", "(", "None", ")", "break", "if", "isinstance", "(", "p", ",", "six", ".", "text_type", ")", ":", "# pragma: no cover", "p", "=", "p", ".", "encode", "(", "'utf-8'", ")", "pkt", "=", "packet", ".", "Packet", "(", "encoded_packet", "=", "p", ")", "self", ".", "_receive_packet", "(", "pkt", ")", "self", ".", "logger", ".", "info", "(", "'Waiting for write loop task to end'", ")", "self", ".", "write_loop_task", ".", "join", "(", ")", "self", ".", "logger", ".", "info", "(", "'Waiting for ping loop task to end'", ")", "self", ".", "ping_loop_event", ".", "set", "(", ")", "self", ".", "ping_loop_task", ".", "join", "(", ")", "if", "self", ".", "state", "==", "'connected'", ":", "self", ".", "_trigger_event", "(", "'disconnect'", ",", "run_async", "=", "False", ")", "try", ":", "connected_clients", ".", "remove", "(", "self", ")", "except", "ValueError", ":", "# pragma: no cover", "pass", "self", ".", "_reset", "(", ")", "self", ".", "logger", ".", "info", "(", "'Exiting read loop task'", ")" ]
Decode the given value reverting % - encoded groups .
def _decode ( cls , value ) : value = cls . _DEC_RE . sub ( lambda x : '%c' % int ( x . group ( 1 ) , 16 ) , value ) return json . loads ( value )
10,609
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L153-L157
[ "def", "mprotect", "(", "self", ",", "lpAddress", ",", "dwSize", ",", "flNewProtect", ")", ":", "hProcess", "=", "self", ".", "get_handle", "(", "win32", ".", "PROCESS_VM_OPERATION", ")", "return", "win32", ".", "VirtualProtectEx", "(", "hProcess", ",", "lpAddress", ",", "dwSize", ",", "flNewProtect", ")" ]
Decode a bucket key into a BucketKey instance .
def decode(cls, key):
    """Decode a bucket key into a BucketKey instance.

    A key looks like '<prefix>:<uuid>[/name=value...]'; the prefix
    selects the key version and each value is %-decoded. Raises
    ValueError for malformed keys or parameter expressions.
    """
    # Determine bucket key version
    prefix, sep, param_str = key.partition(':')
    if sep != ':' or prefix not in cls._prefix_to_version:
        raise ValueError("%r is not a bucket key" % key)
    version = cls._prefix_to_version[prefix]

    # The first '/'-separated element is the uuid; the rest are
    # 'name=value' parameter expressions.
    parts = param_str.split('/')
    uuid = parts[0]
    params = {}
    for part in parts[1:]:
        name, sep, value = part.partition('=')
        # Make sure it's well-formed
        if sep != '=':
            raise ValueError(
                "Cannot interpret parameter expression %r" % part)
        params[name] = cls._decode(value)

    # Return a BucketKey
    return cls(uuid, params, version=version)
10,610
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L198-L229
[ "def", "_recv_msg", "(", "self", ")", ":", "command", "=", "ord", "(", "recv_blocking", "(", "self", ".", "_soc", ",", "1", ")", ")", "msglen", "=", "recv_blocking", "(", "self", ".", "_soc", ",", "4", ")", "msglen", "=", "(", "(", "msglen", "[", "0", "]", "<<", "24", ")", "+", "(", "msglen", "[", "1", "]", "<<", "16", ")", "+", "(", "msglen", "[", "2", "]", "<<", "8", ")", "+", "msglen", "[", "3", "]", ")", "msg", "=", "recv_blocking", "(", "self", ".", "_soc", ",", "msglen", ")", "return", "command", ",", "msg" ]
Helper method to determine if a summarize record should be added .
def need_summary(self, now, max_updates, max_age):
    """Helper method to determine if a summarize record should be added.

    True when an existing summarize record has aged past ``max_age``,
    or when no summarize record exists yet and the bucket has
    accumulated at least ``max_updates`` update records.
    """
    # An old summarize record that has aged out must be re-issued
    if self.summarized is True and now >= self.last_summarize_ts + max_age:
        return True
    return self.summarized is False and self.updates >= max_updates
10,611
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L403-L423
[ "def", "releaseNativeOverlayHandle", "(", "self", ",", "ulOverlayHandle", ",", "pNativeTextureHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "releaseNativeOverlayHandle", "result", "=", "fn", "(", "ulOverlayHandle", ",", "pNativeTextureHandle", ")", "return", "result" ]
Return a dict representing this bucket .
def dehydrate(self):
    """Return a dict representing this bucket.

    Only the attributes named in ``self.attrs`` are included.
    """
    return {attr: getattr(self, attr) for attr in self.attrs}
10,612
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L464-L472
[ "def", "new_messages_handler", "(", "stream", ")", ":", "# TODO: handle no user found exception\r", "while", "True", ":", "packet", "=", "yield", "from", "stream", ".", "get", "(", ")", "session_id", "=", "packet", ".", "get", "(", "'session_key'", ")", "msg", "=", "packet", ".", "get", "(", "'message'", ")", "username_opponent", "=", "packet", ".", "get", "(", "'username'", ")", "if", "session_id", "and", "msg", "and", "username_opponent", ":", "user_owner", "=", "get_user_from_session", "(", "session_id", ")", "if", "user_owner", ":", "user_opponent", "=", "get_user_model", "(", ")", ".", "objects", ".", "get", "(", "username", "=", "username_opponent", ")", "dialog", "=", "get_dialogs_with_user", "(", "user_owner", ",", "user_opponent", ")", "if", "len", "(", "dialog", ")", ">", "0", ":", "# Save the message\r", "msg", "=", "models", ".", "Message", ".", "objects", ".", "create", "(", "dialog", "=", "dialog", "[", "0", "]", ",", "sender", "=", "user_owner", ",", "text", "=", "packet", "[", "'message'", "]", ",", "read", "=", "False", ")", "packet", "[", "'created'", "]", "=", "msg", ".", "get_formatted_create_datetime", "(", ")", "packet", "[", "'sender_name'", "]", "=", "msg", ".", "sender", ".", "username", "packet", "[", "'message_id'", "]", "=", "msg", ".", "id", "# Send the message\r", "connections", "=", "[", "]", "# Find socket of the user which sent the message\r", "if", "(", "user_owner", ".", "username", ",", "user_opponent", ".", "username", ")", "in", "ws_connections", ":", "connections", ".", "append", "(", "ws_connections", "[", "(", "user_owner", ".", "username", ",", "user_opponent", ".", "username", ")", "]", ")", "# Find socket of the opponent\r", "if", "(", "user_opponent", ".", "username", ",", "user_owner", ".", "username", ")", "in", "ws_connections", ":", "connections", ".", "append", "(", "ws_connections", "[", "(", "user_opponent", ".", "username", ",", "user_owner", ".", "username", ")", "]", ")", "else", ":", "# Find sockets of 
people who the opponent is talking with\r", "opponent_connections", "=", "list", "(", "filter", "(", "lambda", "x", ":", "x", "[", "0", "]", "==", "user_opponent", ".", "username", ",", "ws_connections", ")", ")", "opponent_connections_sockets", "=", "[", "ws_connections", "[", "i", "]", "for", "i", "in", "opponent_connections", "]", "connections", ".", "extend", "(", "opponent_connections_sockets", ")", "yield", "from", "fanout_message", "(", "connections", ",", "packet", ")", "else", ":", "pass", "# no dialog found\r", "else", ":", "pass", "# no user_owner\r", "else", ":", "pass" ]
Determine delay until next request .
def delay(self, params, now=None):
    """Determine delay until next request.

    Leaky-bucket check: the water level drains by one unit per elapsed
    second and each request adds ``limit.cost``. Returns the number of
    seconds the caller must wait when the bucket would overflow, or
    None when the request is accepted (in which case the level is
    raised and ``next`` is set to ``now``).
    """
    if now is None:
        now = time.time()

    # Initialize last, and never let the clock run backwards
    if not self.last:
        self.last = now
    elif now < self.last:
        now = self.last

    # Drain the bucket by the elapsed time, then advance the clock
    elapsed = now - self.last
    self.last = now
    self.level = max(self.level - elapsed, 0)

    # Would this request overflow the bucket?
    overflow = self.level + self.limit.cost - self.limit.unit_value
    if overflow >= self.eps:
        self.next = now + overflow
        return overflow

    # Accept the request: raise the water level
    self.level += self.limit.cost
    self.next = now
    return None
10,613
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L474-L506
[ "def", "register", "(", "self", ",", "model", "=", "None", ",", "include_fields", "=", "[", "]", ",", "exclude_fields", "=", "[", "]", ",", "mapping_fields", "=", "{", "}", ")", ":", "def", "registrar", "(", "cls", ")", ":", "\"\"\"Register models for a given class.\"\"\"", "if", "not", "issubclass", "(", "cls", ",", "Model", ")", ":", "raise", "TypeError", "(", "\"Supplied model is not a valid model.\"", ")", "self", ".", "_registry", "[", "cls", "]", "=", "{", "'include_fields'", ":", "include_fields", ",", "'exclude_fields'", ":", "exclude_fields", ",", "'mapping_fields'", ":", "mapping_fields", ",", "}", "self", ".", "_connect_signals", "(", "cls", ")", "# We need to return the class, as the decorator is basically", "# syntactic sugar for:", "# MyClass = auditlog.register(MyClass)", "return", "cls", "if", "model", "is", "None", ":", "# If we're being used as a decorator, return a callable with the", "# wrapper.", "return", "lambda", "cls", ":", "registrar", "(", "cls", ")", "else", ":", "# Otherwise, just register the model.", "registrar", "(", "model", ")" ]
Return remaining messages before limiting .
def messages(self):
    """Return remaining messages before limiting."""
    free_fraction = ((self.limit.unit_value - self.level) /
                     self.limit.unit_value)
    return int(math.floor(free_fraction * self.limit.value))
10,614
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L509-L513
[ "def", "sync_blockchain", "(", "working_dir", ",", "bt_opts", ",", "last_block", ",", "server_state", ",", "expected_snapshots", "=", "{", "}", ",", "*", "*", "virtualchain_args", ")", ":", "subdomain_index", "=", "server_state", "[", "'subdomains'", "]", "atlas_state", "=", "server_state", "[", "'atlas'", "]", "# make this usable even if we haven't explicitly configured virtualchain ", "impl", "=", "sys", ".", "modules", "[", "__name__", "]", "log", ".", "info", "(", "\"Synchronizing database {} up to block {}\"", ".", "format", "(", "working_dir", ",", "last_block", ")", ")", "# NOTE: this is the only place where a read-write handle should be created,", "# since this is the only place where the db should be modified.", "new_db", "=", "BlockstackDB", ".", "borrow_readwrite_instance", "(", "working_dir", ",", "last_block", ",", "expected_snapshots", "=", "expected_snapshots", ")", "# propagate runtime state to virtualchain callbacks", "new_db", ".", "subdomain_index", "=", "subdomain_index", "new_db", ".", "atlas_state", "=", "atlas_state", "rc", "=", "virtualchain", ".", "sync_virtualchain", "(", "bt_opts", ",", "last_block", ",", "new_db", ",", "expected_snapshots", "=", "expected_snapshots", ",", "*", "*", "virtualchain_args", ")", "BlockstackDB", ".", "release_readwrite_instance", "(", "new_db", ",", "last_block", ")", "return", "rc" ]
Return a dict representing this limit.
def dehydrate(self):
    """Return a dict representing this limit.

    Includes the limit's full class name plus every attribute listed
    in ``self.attrs`` (read via getattr so properties come into play).
    """
    result = {'limit_class': self._limit_full_name}
    for attr in self.attrs:
        result[attr] = getattr(self, attr)
    return result
10,615
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L729-L738
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Given a bucket key load the corresponding bucket .
def load(self, key):
    """Given a bucket key, load the corresponding bucket.

    ``key`` may be a raw key string (decoded via BucketKey.decode) or
    a BucketKey; its uuid must match this limit's uuid, else
    ValueError is raised.

    Version 1 keys store a msgpack'd bucket directly in the database;
    later versions store a list of update records that are replayed
    through a BucketLoader.

    NOTE(review): uses ``basestring`` -- this code targets Python 2.
    """
    # Turn the key into a BucketKey
    if isinstance(key, basestring):
        key = BucketKey.decode(key)

    # Make sure the uuids match
    if key.uuid != self.uuid:
        raise ValueError("%s is not a bucket corresponding to this limit" % key)

    # If the key is a version 1 key, load it straight from the
    # database
    if key.version == 1:
        raw = self.db.get(str(key))
        if raw is None:
            # No stored bucket yet: return a fresh, empty bucket
            return self.bucket_class(self.db, self, str(key))
        return self.bucket_class.hydrate(self.db, msgpack.loads(raw), self, str(key))

    # OK, use a BucketLoader
    records = self.db.lrange(str(key), 0, -1)
    loader = BucketLoader(self.bucket_class, self.db, self, str(key), records)
    return loader.bucket
10,616
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L780-L813
[ "def", "image_to_data", "(", "image", ",", "lang", "=", "None", ",", "config", "=", "''", ",", "nice", "=", "0", ",", "output_type", "=", "Output", ".", "STRING", ")", ":", "if", "get_tesseract_version", "(", ")", "<", "'3.05'", ":", "raise", "TSVNotSupported", "(", ")", "config", "=", "'{} {}'", ".", "format", "(", "'-c tessedit_create_tsv=1'", ",", "config", ".", "strip", "(", ")", ")", ".", "strip", "(", ")", "args", "=", "[", "image", ",", "'tsv'", ",", "lang", ",", "config", ",", "nice", "]", "return", "{", "Output", ".", "BYTES", ":", "lambda", ":", "run_and_get_output", "(", "*", "(", "args", "+", "[", "True", "]", ")", ")", ",", "Output", ".", "DATAFRAME", ":", "lambda", ":", "get_pandas_output", "(", "args", "+", "[", "True", "]", ")", ",", "Output", ".", "DICT", ":", "lambda", ":", "file_to_dict", "(", "run_and_get_output", "(", "*", "args", ")", ",", "'\\t'", ",", "-", "1", ")", ",", "Output", ".", "STRING", ":", "lambda", ":", "run_and_get_output", "(", "*", "args", ")", ",", "}", "[", "output_type", "]", "(", ")" ]
Given a bucket key compute the parameters used to compute that key .
def decode(self, key):
    """Given a bucket key, compute the parameters used to compute
    that key.

    Raises ValueError when the key's uuid does not correspond to this
    limit.
    """
    # Parse the bucket key
    bucket_key = BucketKey.decode(key)

    # Make sure the uuids match
    if bucket_key.uuid != self.uuid:
        raise ValueError(
            "%s is not a bucket corresponding to this limit" % key)
    return bucket_key.params
10,617
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L815-L835
[ "def", "get_dbs", "(", ")", ":", "url", "=", "posixpath", ".", "join", "(", "config", ".", "db_index_url", ",", "'DBS'", ")", "response", "=", "requests", ".", "get", "(", "url", ")", "dbs", "=", "response", ".", "content", ".", "decode", "(", "'ascii'", ")", ".", "splitlines", "(", ")", "dbs", "=", "[", "re", ".", "sub", "(", "'\\t{2,}'", ",", "'\\t'", ",", "line", ")", ".", "split", "(", "'\\t'", ")", "for", "line", "in", "dbs", "]", "return", "dbs" ]
Performs final filtering of the request to determine if this limit applies. Returns False if the limit does not apply or if the call should not be limited, or True to apply the limit.
def _filter(self, environ, params):
    """Performs final filtering of the request to determine if this
    limit applies.

    Returns False when the limit does not apply (required query
    arguments are missing, or ``self.filter()`` raised DeferLimit).
    Otherwise applies the limit -- pushing an update record for the
    bucket into the database, possibly scheduling the bucket for
    compaction, setting the bucket's expiration, and recording any
    computed delay in ``environ['turnstile.delay']`` -- and returns
    ``not self.continue_scan``.

    :param environ: The WSGI environment for the request.
    :param params: Parameters derived from the route match.
    """

    # Search for required query arguments
    if self.queries:
        # No query string available
        if 'QUERY_STRING' not in environ:
            return False

        # Extract the list of provided query arguments from the
        # QUERY_STRING
        available = set(qstr.partition('=')[0]
                        for qstr in environ['QUERY_STRING'].split('&'))

        # Check if we have the required query arguments
        required = set(self.queries)
        if not required.issubset(available):
            return False

    # Use only the parameters listed in use; we'll add the others
    # back later
    unused = {}
    for key, value in params.items():
        if key not in self.use:
            unused[key] = value

    # Do this in a separate step so we avoid changing a
    # dictionary during traversal
    for key in unused:
        del params[key]

    # First, we need to set up any additional params required to
    # get the bucket.  If the DeferLimit exception is thrown, no
    # further processing is performed.
    try:
        additional = self.filter(environ, params, unused) or {}
    except DeferLimit:
        return False

    # Compute the bucket key
    key = self.key(params)

    # Update the parameters...
    params.update(unused)
    params.update(additional)

    # Get the current time
    now = time.time()

    # Allow up to a minute to mutate the bucket record.  If no
    # bucket exists currently, this is essentially a no-op, and
    # the bucket won't expire anyway, once the update record is
    # pushed.
    self.db.expire(key, 60)

    # Push an update record
    update_uuid = str(uuid.uuid4())
    update = {
        'uuid': update_uuid,
        'update': {
            'params': params,
            'time': now,
        },
    }
    self.db.rpush(key, msgpack.dumps(update))

    # Now suck in the bucket
    records = self.db.lrange(key, 0, -1)
    loader = BucketLoader(self.bucket_class, self.db, self, key,
                          records)

    # Determine if we should initialize the compactor algorithm on
    # this bucket
    if 'turnstile.conf' in environ:
        config = environ['turnstile.conf']['compactor']
        try:
            max_updates = int(config['max_updates'])
        except (KeyError, ValueError):
            max_updates = None
        try:
            max_age = int(config['max_age'])
        except (KeyError, ValueError):
            # default: summarize records age out after 10 minutes
            max_age = 600
        if max_updates and loader.need_summary(now, max_updates, max_age):
            # Add a summary record; we want to do this before
            # instructing the compactor to compact.  If we did the
            # compactor instruction first, and a crash occurred
            # before adding the summarize record, the lack of
            # quiesence could cause two compactor threads to run
            # on the same bucket, leading to a race condition that
            # could corrupt the bucket.  With this ordering, if a
            # crash occurs before the compactor instruction, the
            # maximum aging applied to summarize records will
            # cause this logic to eventually be retriggered, which
            # should allow the compactor instruction to be issued.
            summarize = dict(summarize=now, uuid=str(uuid.uuid4()))
            self.db.rpush(key, msgpack.dumps(summarize))

            # Instruct the compactor to compact this record
            compactor_key = config.get('compactor_key', 'compactor')
            self.db.zadd(compactor_key, int(math.ceil(now)), key)

    # Set the expire on the bucket
    self.db.expireat(key, loader.bucket.expire)

    # If we found a delay, store the particulars in the
    # environment; this will later be sorted and an error message
    # corresponding to the longest delay returned.
    if loader.delay is not None:
        environ.setdefault('turnstile.delay', [])
        environ['turnstile.delay'].append((loader.delay, self,
                                           loader.bucket))

    # Finally, if desired, add the bucket key to a desired
    # database set
    set_name = environ.get('turnstile.bucket_set')
    if set_name:
        self.db.zadd(set_name, loader.bucket.expire, key)

    # Should we continue the route scan?
    return not self.continue_scan
10,618
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L849-L972
[ "def", "to_json", "(", "self", ")", ":", "return", "json", ".", "dumps", "(", "{", "\"statistics\"", ":", "self", ".", "get_statistics", "(", ")", ",", "\"authors\"", ":", "[", "json", ".", "loads", "(", "author", ".", "to_json", "(", ")", ")", "for", "author", "in", "self", ".", "get_authors", "(", ")", "]", "}", ",", "indent", "=", "2", ")" ]
Formats a response entity . Returns a tuple of the desired status code and the formatted entity . The default status code is passed in as is a dictionary of headers .
def format ( self , status , headers , environ , bucket , delay ) : # This is a default response entity, which can be overridden # by limit subclasses. entity = ( "This request was rate-limited. " "Please retry your request after %s." % time . strftime ( "%Y-%m-%dT%H:%M:%SZ" , time . gmtime ( bucket . next ) ) ) headers [ 'Content-Type' ] = 'text/plain' return status , entity
10,619
https://github.com/klmitch/turnstile/blob/8fe9a359b45e505d3192ab193ecf9be177ab1a17/turnstile/limits.py#L993-L1023
[ "def", "is_writable_by_others", "(", "filename", ")", ":", "mode", "=", "os", ".", "stat", "(", "filename", ")", "[", "stat", ".", "ST_MODE", "]", "return", "mode", "&", "stat", ".", "S_IWOTH" ]
Removes common prefix from a collection of strings
def drop_prefix ( strings ) : strings_without_extensions = [ s . split ( "." , 2 ) [ 0 ] for s in strings ] if len ( strings_without_extensions ) == 1 : return [ os . path . basename ( strings_without_extensions [ 0 ] ) ] prefix_len = len ( os . path . commonprefix ( strings_without_extensions ) ) result = [ string [ prefix_len : ] for string in strings_without_extensions ] if len ( set ( result ) ) != len ( strings ) : # If these operations resulted in a collision, just return the original # strings. return strings return result
10,620
https://github.com/openvax/varlens/blob/715d3ede5893757b2fcba4117515621bca7b1e5d/varlens/util.py#L52-L68
[ "def", "json_engine", "(", "self", ",", "req", ")", ":", "# pylint: disable=R0201,W0613", "try", ":", "return", "stats", ".", "engine_data", "(", "config", ".", "engine", ")", "except", "(", "error", ".", "LoggableError", ",", "xmlrpc", ".", "ERRORS", ")", "as", "torrent_exc", ":", "raise", "exc", ".", "HTTPInternalServerError", "(", "str", "(", "torrent_exc", ")", ")" ]
Return how many nodes this contains including self .
def count ( self ) : if self . _nodes is None : return 1 return sum ( i . count ( ) for i in self . _nodes )
10,621
https://github.com/ossobv/dutree/blob/adceeeb17f9fd70a7ed9c674850d7015d820eb2a/dutree/dutree.py#L107-L111
[ "def", "quandl_bundle", "(", "environ", ",", "asset_db_writer", ",", "minute_bar_writer", ",", "daily_bar_writer", ",", "adjustment_writer", ",", "calendar", ",", "start_session", ",", "end_session", ",", "cache", ",", "show_progress", ",", "output_dir", ")", ":", "api_key", "=", "environ", ".", "get", "(", "'QUANDL_API_KEY'", ")", "if", "api_key", "is", "None", ":", "raise", "ValueError", "(", "\"Please set your QUANDL_API_KEY environment variable and retry.\"", ")", "raw_data", "=", "fetch_data_table", "(", "api_key", ",", "show_progress", ",", "environ", ".", "get", "(", "'QUANDL_DOWNLOAD_ATTEMPTS'", ",", "5", ")", ")", "asset_metadata", "=", "gen_asset_metadata", "(", "raw_data", "[", "[", "'symbol'", ",", "'date'", "]", "]", ",", "show_progress", ")", "asset_db_writer", ".", "write", "(", "asset_metadata", ")", "symbol_map", "=", "asset_metadata", ".", "symbol", "sessions", "=", "calendar", ".", "sessions_in_range", "(", "start_session", ",", "end_session", ")", "raw_data", ".", "set_index", "(", "[", "'date'", ",", "'symbol'", "]", ",", "inplace", "=", "True", ")", "daily_bar_writer", ".", "write", "(", "parse_pricing_and_vol", "(", "raw_data", ",", "sessions", ",", "symbol_map", ")", ",", "show_progress", "=", "show_progress", ")", "raw_data", ".", "reset_index", "(", "inplace", "=", "True", ")", "raw_data", "[", "'symbol'", "]", "=", "raw_data", "[", "'symbol'", "]", ".", "astype", "(", "'category'", ")", "raw_data", "[", "'sid'", "]", "=", "raw_data", ".", "symbol", ".", "cat", ".", "codes", "adjustment_writer", ".", "write", "(", "splits", "=", "parse_splits", "(", "raw_data", "[", "[", "'sid'", ",", "'date'", ",", "'split_ratio'", ",", "]", "]", ".", "loc", "[", "raw_data", ".", "split_ratio", "!=", "1", "]", ",", "show_progress", "=", "show_progress", ")", ",", "dividends", "=", "parse_dividends", "(", "raw_data", "[", "[", "'sid'", ",", "'date'", ",", "'ex_dividend'", ",", "]", "]", ".", "loc", "[", "raw_data", ".", "ex_dividend", "!=", "0", "]", 
",", "show_progress", "=", "show_progress", ")", ")" ]
Return the total apparent size including children .
def app_size ( self ) : if self . _nodes is None : return self . _app_size return sum ( i . app_size ( ) for i in self . _nodes )
10,622
https://github.com/ossobv/dutree/blob/adceeeb17f9fd70a7ed9c674850d7015d820eb2a/dutree/dutree.py#L120-L124
[ "def", "__SendMediaRequest", "(", "self", ",", "request", ",", "end", ")", ":", "def", "CheckResponse", "(", "response", ")", ":", "if", "response", "is", "None", ":", "# Caller shouldn't call us if the response is None,", "# but handle anyway.", "raise", "exceptions", ".", "RequestError", "(", "'Request to url %s did not return a response.'", "%", "response", ".", "request_url", ")", "response", "=", "http_wrapper", ".", "MakeRequest", "(", "self", ".", "bytes_http", ",", "request", ",", "retry_func", "=", "self", ".", "retry_func", ",", "retries", "=", "self", ".", "num_retries", ",", "check_response_func", "=", "CheckResponse", ")", "if", "response", ".", "status_code", "==", "http_wrapper", ".", "RESUME_INCOMPLETE", ":", "last_byte", "=", "self", ".", "__GetLastByte", "(", "self", ".", "_GetRangeHeaderFromResponse", "(", "response", ")", ")", "if", "last_byte", "+", "1", "!=", "end", ":", "self", ".", "stream", ".", "seek", "(", "last_byte", "+", "1", ")", "return", "response" ]
Return the total used size including children .
def use_size ( self ) : if self . _nodes is None : return self . _use_size return sum ( i . use_size ( ) for i in self . _nodes )
10,623
https://github.com/ossobv/dutree/blob/adceeeb17f9fd70a7ed9c674850d7015d820eb2a/dutree/dutree.py#L127-L131
[ "def", "start_vm", "(", "access_token", ",", "subscription_id", ",", "resource_group", ",", "vm_name", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourceGroups/'", ",", "resource_group", ",", "'/providers/Microsoft.Compute/virtualMachines/'", ",", "vm_name", ",", "'/start'", ",", "'?api-version='", ",", "COMP_API", "]", ")", "return", "do_post", "(", "endpoint", ",", "''", ",", "access_token", ")" ]
Return True and delete children if small enough .
def _prune_all_if_small ( self , small_size , a_or_u ) : if self . _nodes is None : return True total_size = ( self . app_size ( ) if a_or_u else self . use_size ( ) ) if total_size < small_size : if a_or_u : self . _set_size ( total_size , self . use_size ( ) ) else : self . _set_size ( self . app_size ( ) , total_size ) return True return False
10,624
https://github.com/ossobv/dutree/blob/adceeeb17f9fd70a7ed9c674850d7015d820eb2a/dutree/dutree.py#L153-L166
[ "async", "def", "start_authentication", "(", "self", ")", ":", "_", ",", "code", "=", "await", "self", ".", "http", ".", "post_data", "(", "'pair-pin-start'", ",", "headers", "=", "_AIRPLAY_HEADERS", ")", "if", "code", "!=", "200", ":", "raise", "DeviceAuthenticationError", "(", "'pair start failed'", ")" ]
Merge some nodes in the directory whilst keeping others .
def _prune_some_if_small ( self , small_size , a_or_u ) : # Assert that we're not messing things up. prev_app_size = self . app_size ( ) prev_use_size = self . use_size ( ) keep_nodes = [ ] prune_app_size = 0 prune_use_size = 0 for node in self . _nodes : node_size = node . app_size ( ) if a_or_u else node . use_size ( ) if node_size < small_size : if a_or_u : prune_app_size += node_size prune_use_size += node . use_size ( ) else : prune_app_size += node . app_size ( ) prune_use_size += node_size else : keep_nodes . append ( node ) # Last "leftover" node? Merge with parent. if len ( keep_nodes ) == 1 and keep_nodes [ - 1 ] . _isdir is None : prune_app_size += keep_nodes [ - 1 ] . _app_size prune_use_size += keep_nodes [ - 1 ] . _use_size keep_nodes = [ ] if prune_app_size : if not keep_nodes : # The only node to keep, no "leftovers" here. Move data # to the parent. keep_nodes = None assert self . _isdir and self . _nodes is not None self . _set_size ( prune_app_size , prune_use_size ) elif keep_nodes and keep_nodes [ - 1 ] . _isdir is None : # There was already a leftover node. Add the new leftovers. keep_nodes [ - 1 ] . _add_size ( prune_app_size , prune_use_size ) else : # Create a new leftover node. keep_nodes . append ( DuNode . new_leftovers ( self . _path , prune_app_size , prune_use_size ) ) # Update nodes and do the actual assertion. self . _nodes = keep_nodes assert prev_app_size == self . app_size ( ) , ( prev_app_size , self . app_size ( ) ) assert prev_use_size == self . use_size ( ) , ( prev_use_size , self . use_size ( ) )
10,625
https://github.com/ossobv/dutree/blob/adceeeb17f9fd70a7ed9c674850d7015d820eb2a/dutree/dutree.py#L168-L215
[ "def", "construct_error_message", "(", "driver_id", ",", "error_type", ",", "message", ",", "timestamp", ")", ":", "builder", "=", "flatbuffers", ".", "Builder", "(", "0", ")", "driver_offset", "=", "builder", ".", "CreateString", "(", "driver_id", ".", "binary", "(", ")", ")", "error_type_offset", "=", "builder", ".", "CreateString", "(", "error_type", ")", "message_offset", "=", "builder", ".", "CreateString", "(", "message", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataStart", "(", "builder", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddDriverId", "(", "builder", ",", "driver_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddType", "(", "builder", ",", "error_type_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddErrorMessage", "(", "builder", ",", "message_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddTimestamp", "(", "builder", ",", "timestamp", ")", "error_data_offset", "=", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataEnd", "(", "builder", ")", "builder", ".", "Finish", "(", "error_data_offset", ")", "return", "bytes", "(", "builder", ".", "Output", "(", ")", ")" ]
After prune_if_smaller_than is run we may still have excess nodes .
def merge_upwards_if_smaller_than ( self , small_size , a_or_u ) : # Assert that we're not messing things up. prev_app_size = self . app_size ( ) prev_use_size = self . use_size ( ) small_nodes = self . _find_small_nodes ( small_size , ( ) , a_or_u ) for node , parents in small_nodes : # Check immediate grandparent for isdir=None and if it # exists, move this there. The isdir=None node is always # last. if len ( parents ) >= 2 : tail = parents [ - 2 ] . _nodes [ - 1 ] if tail . _isdir is None : assert tail . _app_size is not None , tail tail . _add_size ( node . app_size ( ) , node . use_size ( ) ) parents [ - 1 ] . _nodes . remove ( node ) assert len ( parents [ - 1 ] . _nodes ) # The actual assertion. assert prev_app_size == self . app_size ( ) , ( prev_app_size , self . app_size ( ) ) assert prev_use_size == self . use_size ( ) , ( prev_use_size , self . use_size ( ) )
10,626
https://github.com/ossobv/dutree/blob/adceeeb17f9fd70a7ed9c674850d7015d820eb2a/dutree/dutree.py#L217-L258
[ "def", "reset", "(", "self", ")", ":", "logger", ".", "debug", "(", "'StackInABoxService ({0}): Reset'", ".", "format", "(", "self", ".", "__id", ",", "self", ".", "name", ")", ")", "self", ".", "base_url", "=", "'/{0}'", ".", "format", "(", "self", ".", "name", ")", "logger", ".", "debug", "(", "'StackInABoxService ({0}): Hosting Service {1}'", ".", "format", "(", "self", ".", "__id", ",", "self", ".", "name", ")", ")" ]
Return the nodes as a list of lists .
def as_tree ( self ) : if self . _nodes is None : return [ self ] ret = [ self ] for node in self . _nodes : ret . append ( node . as_tree ( ) ) return ret
10,627
https://github.com/ossobv/dutree/blob/adceeeb17f9fd70a7ed9c674850d7015d820eb2a/dutree/dutree.py#L272-L279
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Immediately check if we can access path . Otherwise bail .
def _check_path ( self ) : if not path . isdir ( self . _path or '/' ) : raise OSError ( 'Path {!r} is not a directory' . format ( self . _path ) )
10,628
https://github.com/ossobv/dutree/blob/adceeeb17f9fd70a7ed9c674850d7015d820eb2a/dutree/dutree.py#L320-L323
[ "def", "insert_many", "(", "self", ",", "rows", ",", "chunk_size", "=", "1000", ",", "ensure", "=", "None", ",", "types", "=", "None", ")", ":", "chunk", "=", "[", "]", "for", "row", "in", "rows", ":", "row", "=", "self", ".", "_sync_columns", "(", "row", ",", "ensure", ",", "types", "=", "types", ")", "chunk", ".", "append", "(", "row", ")", "if", "len", "(", "chunk", ")", "==", "chunk_size", ":", "chunk", "=", "pad_chunk_columns", "(", "chunk", ")", "self", ".", "table", ".", "insert", "(", ")", ".", "execute", "(", "chunk", ")", "chunk", "=", "[", "]", "if", "len", "(", "chunk", ")", ":", "chunk", "=", "pad_chunk_columns", "(", "chunk", ")", "self", ".", "table", ".", "insert", "(", ")", ".", "execute", "(", "chunk", ")" ]
The dmenu command s version message .
def version ( command = 'dmenu' ) : args = [ command , '-v' ] try : # start the dmenu process proc = subprocess . Popen ( args , universal_newlines = True , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) except OSError as err : # something went wrong with starting the process raise DmenuCommandError ( args , err ) if proc . wait ( ) == 0 : # version information from stdout return proc . stdout . read ( ) . rstrip ( '\n' ) # error from dmenu raise DmenuCommandError ( args , proc . stderr . read ( ) )
10,629
https://github.com/allonhadaya/dmenu-python/blob/30eca49a9368c61e13e87f530cc3785a369536c4/dmenu/dmenu.py#L28-L59
[ "def", "new_regid_custom_field", "(", "uwregid", ")", ":", "return", "BridgeCustomField", "(", "field_id", "=", "get_regid_field_id", "(", ")", ",", "name", "=", "BridgeCustomField", ".", "REGID_NAME", ",", "value", "=", "uwregid", ")" ]
Present a dmenu to the user .
def show ( items , command = 'dmenu' , bottom = None , fast = None , case_insensitive = None , lines = None , monitor = None , prompt = None , font = None , background = None , foreground = None , background_selected = None , foreground_selected = None ) : # construct args args = [ command ] if bottom : args . append ( '-b' ) if fast : args . append ( '-f' ) if case_insensitive : args . append ( '-i' ) if lines is not None : args . extend ( ( '-l' , str ( lines ) ) ) if monitor is not None : args . extend ( ( '-m' , str ( monitor ) ) ) if prompt is not None : args . extend ( ( '-p' , prompt ) ) if font is not None : args . extend ( ( '-fn' , font ) ) if background is not None : args . extend ( ( '-nb' , background ) ) if foreground is not None : args . extend ( ( '-nf' , foreground ) ) if background_selected is not None : args . extend ( ( '-sb' , background_selected ) ) if foreground_selected is not None : args . extend ( ( '-sf' , foreground_selected ) ) try : # start the dmenu process proc = subprocess . Popen ( args , universal_newlines = True , stdin = subprocess . PIPE , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) except OSError as err : # something went wrong with starting the process raise DmenuCommandError ( args , err ) # write items over to dmenu with proc . stdin : for item in items : proc . stdin . write ( item ) proc . stdin . write ( '\n' ) if proc . wait ( ) == 0 : # user made a selection return proc . stdout . read ( ) . rstrip ( '\n' ) stderr = proc . stderr . read ( ) if stderr == '' : # user hit escape return None if re . match ( 'usage' , stderr , re . I ) : # usage error raise DmenuUsageError ( args , stderr ) # other error from dmenu raise DmenuCommandError ( args , stderr )
10,630
https://github.com/allonhadaya/dmenu-python/blob/30eca49a9368c61e13e87f530cc3785a369536c4/dmenu/dmenu.py#L62-L206
[ "def", "_process_table_cells", "(", "self", ",", "table", ")", ":", "rows", "=", "[", "]", "for", "i", ",", "tr", "in", "enumerate", "(", "table", ".", "find_all", "(", "'tr'", ")", ")", ":", "row", "=", "[", "]", "for", "c", "in", "tr", ".", "contents", ":", "cell_type", "=", "getattr", "(", "c", ",", "'name'", ",", "None", ")", "if", "cell_type", "not", "in", "(", "'td'", ",", "'th'", ")", ":", "continue", "rowspan", "=", "int", "(", "c", ".", "attrs", ".", "get", "(", "'rowspan'", ",", "1", ")", ")", "colspan", "=", "int", "(", "c", ".", "attrs", ".", "get", "(", "'colspan'", ",", "1", ")", ")", "contents", "=", "self", ".", "_process_children", "(", "c", ")", ".", "strip", "(", ")", "if", "cell_type", "==", "'th'", "and", "i", ">", "0", ":", "contents", "=", "self", ".", "_inline", "(", "'**'", ",", "contents", ")", "row", ".", "append", "(", "Cell", "(", "cell_type", ",", "rowspan", ",", "colspan", ",", "contents", ")", ")", "rows", ".", "append", "(", "row", ")", "return", "rows" ]
Get the graph of up - regulated genes .
def get_upregulated_genes_network ( self ) -> Graph : logger . info ( "In get_upregulated_genes_network()" ) deg_graph = self . graph . copy ( ) # deep copy graph not_diff_expr = self . graph . vs ( up_regulated_eq = False ) # delete genes which are not differentially expressed or have no connections to others deg_graph . delete_vertices ( not_diff_expr . indices ) deg_graph . delete_vertices ( deg_graph . vs . select ( _degree_eq = 0 ) ) return deg_graph
10,631
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/filtered_network.py#L27-L41
[ "def", "pull_session", "(", "session_id", "=", "None", ",", "url", "=", "'default'", ",", "io_loop", "=", "None", ",", "arguments", "=", "None", ")", ":", "coords", "=", "_SessionCoordinates", "(", "session_id", "=", "session_id", ",", "url", "=", "url", ")", "session", "=", "ClientSession", "(", "session_id", "=", "session_id", ",", "websocket_url", "=", "websocket_url_for_server_url", "(", "coords", ".", "url", ")", ",", "io_loop", "=", "io_loop", ",", "arguments", "=", "arguments", ")", "session", ".", "pull", "(", ")", "return", "session" ]
Get the graph of down - regulated genes .
def get_downregulated_genes_network ( self ) -> Graph : logger . info ( "In get_downregulated_genes_network()" ) deg_graph = self . graph . copy ( ) # deep copy graph not_diff_expr = self . graph . vs ( down_regulated_eq = False ) # delete genes which are not differentially expressed or have no connections to others deg_graph . delete_vertices ( not_diff_expr . indices ) deg_graph . delete_vertices ( deg_graph . vs . select ( _degree_eq = 0 ) ) return deg_graph
10,632
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/filtered_network.py#L43-L57
[ "def", "pull_session", "(", "session_id", "=", "None", ",", "url", "=", "'default'", ",", "io_loop", "=", "None", ",", "arguments", "=", "None", ")", ":", "coords", "=", "_SessionCoordinates", "(", "session_id", "=", "session_id", ",", "url", "=", "url", ")", "session", "=", "ClientSession", "(", "session_id", "=", "session_id", ",", "websocket_url", "=", "websocket_url_for_server_url", "(", "coords", ".", "url", ")", ",", "io_loop", "=", "io_loop", ",", "arguments", "=", "arguments", ")", "session", ".", "pull", "(", ")", "return", "session" ]
Generate the parser for all sub - commands
def make_parser ( ) : parser = argparse . ArgumentParser ( description = 'BERNARD CLI utility' ) sp = parser . add_subparsers ( help = 'Sub-command' ) parser_run = sp . add_parser ( 'run' , help = 'Run the BERNARD server' ) parser_run . set_defaults ( action = 'run' ) parser_sheet = sp . add_parser ( 'sheet' , help = 'Import files from Google ' 'Sheets' ) parser_sheet . set_defaults ( action = 'sheet' ) parser_sheet . add_argument ( '--auth_host_name' , default = 'localhost' , help = 'Hostname when running a local web server.' ) parser_sheet . add_argument ( '--noauth_local_webserver' , action = 'store_true' , default = False , help = 'Do not run a local web server.' ) parser_sheet . add_argument ( '--auth_host_port' , default = [ 8080 , 8090 ] , type = int , nargs = '*' , help = 'Port web server should listen on.' ) parser_sheet . add_argument ( '--logging_level' , default = 'ERROR' , choices = [ 'DEBUG' , 'INFO' , 'WARNING' , 'ERROR' , 'CRITICAL' ] , help = 'Set the logging level of detail.' ) parser_sp = sp . add_parser ( 'start_project' , help = 'Starts a project' ) parser_sp . set_defaults ( action = 'start_project' ) parser_sp . add_argument ( 'project_name' , help = 'A snake-case name for your project' ) parser_sp . add_argument ( 'dir' , help = 'Directory to store the project' ) return parser
10,633
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/main/_base.py#L5-L55
[ "def", "handlePortfolio", "(", "self", ",", "msg", ")", ":", "# log handler msg", "self", ".", "log_msg", "(", "\"portfolio\"", ",", "msg", ")", "# contract identifier", "contract_tuple", "=", "self", ".", "contract_to_tuple", "(", "msg", ".", "contract", ")", "contractString", "=", "self", ".", "contractString", "(", "contract_tuple", ")", "# try creating the contract", "self", ".", "registerContract", "(", "msg", ".", "contract", ")", "# new account?", "if", "msg", ".", "accountName", "not", "in", "self", ".", "_portfolios", ".", "keys", "(", ")", ":", "self", ".", "_portfolios", "[", "msg", ".", "accountName", "]", "=", "{", "}", "self", ".", "_portfolios", "[", "msg", ".", "accountName", "]", "[", "contractString", "]", "=", "{", "\"symbol\"", ":", "contractString", ",", "\"position\"", ":", "int", "(", "msg", ".", "position", ")", ",", "\"marketPrice\"", ":", "float", "(", "msg", ".", "marketPrice", ")", ",", "\"marketValue\"", ":", "float", "(", "msg", ".", "marketValue", ")", ",", "\"averageCost\"", ":", "float", "(", "msg", ".", "averageCost", ")", ",", "\"unrealizedPNL\"", ":", "float", "(", "msg", ".", "unrealizedPNL", ")", ",", "\"realizedPNL\"", ":", "float", "(", "msg", ".", "realizedPNL", ")", ",", "\"totalPNL\"", ":", "float", "(", "msg", ".", "realizedPNL", ")", "+", "float", "(", "msg", ".", "unrealizedPNL", ")", ",", "\"account\"", ":", "msg", ".", "accountName", "}", "# fire callback", "self", ".", "ibCallback", "(", "caller", "=", "\"handlePortfolio\"", ",", "msg", "=", "msg", ")" ]
Run the appropriate main function according to the output of the parser .
def main ( ) : parser = make_parser ( ) args = parser . parse_args ( ) if not hasattr ( args , 'action' ) : parser . print_help ( ) exit ( 1 ) if args . action == 'sheet' : from bernard . misc . sheet_sync import main as main_sheet main_sheet ( args ) elif args . action == 'run' : from bernard . cli import main as main_run main_run ( ) elif args . action == 'start_project' : from bernard . misc . start_project import main as main_sp main_sp ( args )
10,634
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/main/_base.py#L58-L78
[ "def", "ensure_compatible_admin", "(", "view", ")", ":", "def", "wrapper", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "user_roles", "=", "request", ".", "user", ".", "user_data", ".", "get", "(", "'roles'", ",", "[", "]", ")", "if", "len", "(", "user_roles", ")", "!=", "1", ":", "context", "=", "{", "'message'", ":", "'I need to be able to manage user accounts. '", "'My username is %s'", "%", "request", ".", "user", ".", "username", "}", "return", "render", "(", "request", ",", "'mtp_common/user_admin/incompatible-admin.html'", ",", "context", "=", "context", ")", "return", "view", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper" ]
Read a . env file and load into os . environ .
def load_dotenv ( dotenv_path , verbose = False ) : if not os . path . exists ( dotenv_path ) : if verbose : warnings . warn ( f"Not loading {dotenv_path}, it doesn't exist." ) return None for k , v in dotenv_values ( dotenv_path ) . items ( ) : os . environ . setdefault ( k , v ) return True
10,635
https://github.com/sthysel/knobs/blob/1d01f50f643068076e38118a93fed9375ea3ac81/src/environment.py#L20-L35
[ "def", "create_pgroup_snapshot", "(", "self", ",", "source", ",", "*", "*", "kwargs", ")", ":", "# In REST 1.4, support was added for snapshotting multiple pgroups. As a", "# result, the endpoint response changed from an object to an array of", "# objects. To keep the response type consistent between REST versions,", "# we unbox the response when creating a single snapshot.", "result", "=", "self", ".", "create_pgroup_snapshots", "(", "[", "source", "]", ",", "*", "*", "kwargs", ")", "if", "self", ".", "_rest_version", ">=", "LooseVersion", "(", "\"1.4\"", ")", ":", "headers", "=", "result", ".", "headers", "result", "=", "ResponseDict", "(", "result", "[", "0", "]", ")", "result", ".", "headers", "=", "headers", "return", "result" ]
Gets the value of a given key from the given . env
def get_key ( dotenv_path , key_to_get , verbose = False ) : key_to_get = str ( key_to_get ) if not os . path . exists ( dotenv_path ) : if verbose : warnings . warn ( f"Can't read {dotenv_path}, it doesn't exist." ) return None dotenv_as_dict = dotenv_values ( dotenv_path ) if key_to_get in dotenv_as_dict : return dotenv_as_dict [ key_to_get ] else : if verbose : warnings . warn ( f"key {key_to_get} not found in {dotenv_path}." ) return None
10,636
https://github.com/sthysel/knobs/blob/1d01f50f643068076e38118a93fed9375ea3ac81/src/environment.py#L38-L59
[ "def", "libvlc_video_set_crop_geometry", "(", "p_mi", ",", "psz_geometry", ")", ":", "f", "=", "_Cfunctions", ".", "get", "(", "'libvlc_video_set_crop_geometry'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_video_set_crop_geometry'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", ")", ",", "None", ",", "None", ",", "MediaPlayer", ",", "ctypes", ".", "c_char_p", ")", "return", "f", "(", "p_mi", ",", "psz_geometry", ")" ]
Returns the quote format depending on the quote_mode . This determines if the key value will be quoted when written to the env file .
def _get_format ( value , quote_mode = 'always' ) : formats = { 'always' : '{key}="{value}"\n' , 'auto' : '{key}={value}\n' } if quote_mode not in formats . keys ( ) : return KeyError ( f'quote_mode {quote_mode} is invalid' ) _mode = quote_mode if quote_mode == 'auto' and ' ' in value : _mode = 'always' return formats . get ( _mode )
10,637
https://github.com/sthysel/knobs/blob/1d01f50f643068076e38118a93fed9375ea3ac81/src/environment.py#L181-L201
[ "def", "detail_dict", "(", "self", ")", ":", "d", "=", "self", ".", "dict", "def", "aug_col", "(", "c", ")", ":", "d", "=", "c", ".", "dict", "d", "[", "'stats'", "]", "=", "[", "s", ".", "dict", "for", "s", "in", "c", ".", "stats", "]", "return", "d", "d", "[", "'table'", "]", "=", "self", ".", "table", ".", "dict", "d", "[", "'table'", "]", "[", "'columns'", "]", "=", "[", "aug_col", "(", "c", ")", "for", "c", "in", "self", ".", "table", ".", "columns", "]", "return", "d" ]
Search in increasingly higher folders for the given file
def find_dotenv ( filename = '.env' , raise_error_if_not_found = False , usecwd = False ) : if usecwd or '__file__' not in globals ( ) : # should work without __file__, e.g. in REPL or IPython notebook path = os . getcwd ( ) else : # will work for .py files frame_filename = sys . _getframe ( ) . f_back . f_code . co_filename path = os . path . dirname ( os . path . abspath ( frame_filename ) ) for dirname in _walk_to_root ( path ) : check_path = os . path . join ( dirname , filename ) if os . path . exists ( check_path ) : return check_path if raise_error_if_not_found : raise IOError ( 'File not found' ) return ''
10,638
https://github.com/sthysel/knobs/blob/1d01f50f643068076e38118a93fed9375ea3ac81/src/environment.py#L237-L259
[ "def", "construct_error_message", "(", "driver_id", ",", "error_type", ",", "message", ",", "timestamp", ")", ":", "builder", "=", "flatbuffers", ".", "Builder", "(", "0", ")", "driver_offset", "=", "builder", ".", "CreateString", "(", "driver_id", ".", "binary", "(", ")", ")", "error_type_offset", "=", "builder", ".", "CreateString", "(", "error_type", ")", "message_offset", "=", "builder", ".", "CreateString", "(", "message", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataStart", "(", "builder", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddDriverId", "(", "builder", ",", "driver_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddType", "(", "builder", ",", "error_type_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddErrorMessage", "(", "builder", ",", "message_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddTimestamp", "(", "builder", ",", "timestamp", ")", "error_data_offset", "=", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataEnd", "(", "builder", ")", "builder", ".", "Finish", "(", "error_data_offset", ")", "return", "bytes", "(", "builder", ".", "Output", "(", ")", ")" ]
Decorator for reduction methods .
def reducer ( * tokens ) : def wrapper ( func ) : # Make sure that we have a list of reducer sequences if not hasattr ( func , 'reducers' ) : func . reducers = [ ] # Add the token to the list of reducer sequences func . reducers . append ( list ( tokens ) ) return func return wrapper
10,639
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/_parser.py#L19-L36
[ "def", "create_or_update_secret", "(", "self", ",", "path", ",", "secret", ",", "cas", "=", "None", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "'options'", ":", "{", "}", ",", "'data'", ":", "secret", ",", "}", "if", "cas", "is", "not", "None", ":", "params", "[", "'options'", "]", "[", "'cas'", "]", "=", "cas", "api_path", "=", "'/v1/{mount_point}/data/{path}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "path", "=", "path", ")", "response", "=", "self", ".", "_adapter", ".", "post", "(", "url", "=", "api_path", ",", "json", "=", "params", ",", ")", "return", "response", ".", "json", "(", ")" ]
Parses policy to a tree of Check objects .
def parse_rule ( rule : str , raise_error = False ) : parser = Parser ( raise_error ) return parser . parse ( rule )
10,640
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/_parser.py#L269-L273
[ "def", "gamma", "(", "phi1", ",", "phi2", ",", "theta1", ",", "theta2", ")", ":", "if", "phi1", "==", "phi2", "and", "theta1", "==", "theta2", ":", "gamma", "=", "0", "else", ":", "gamma", "=", "atan", "(", "sin", "(", "theta2", ")", "*", "sin", "(", "phi2", "-", "phi1", ")", "/", "(", "cos", "(", "theta1", ")", "*", "sin", "(", "theta2", ")", "*", "cos", "(", "phi1", "-", "phi2", ")", "-", "sin", "(", "theta1", ")", "*", "cos", "(", "theta2", ")", ")", ")", "dummy_arg", "=", "(", "cos", "(", "gamma", ")", "*", "cos", "(", "theta1", ")", "*", "sin", "(", "theta2", ")", "*", "cos", "(", "phi1", "-", "phi2", ")", "+", "sin", "(", "gamma", ")", "*", "sin", "(", "theta2", ")", "*", "sin", "(", "phi2", "-", "phi1", ")", "-", "cos", "(", "gamma", ")", "*", "sin", "(", "theta1", ")", "*", "cos", "(", "theta2", ")", ")", "if", "dummy_arg", ">=", "0", ":", "return", "gamma", "else", ":", "return", "pi", "+", "gamma" ]
Perform a greedy reduction of token stream .
def _reduce ( self ) : for reduction , methname in self . reducers : token_num = len ( reduction ) if ( len ( self . tokens ) >= token_num and self . tokens [ - token_num : ] == reduction ) : # Get the reduction method meth = getattr ( self , methname ) # Reduce the token stream results = meth ( * self . values [ - token_num : ] ) self . tokens [ - token_num : ] = [ r [ 0 ] for r in results ] self . values [ - token_num : ] = [ r [ 1 ] for r in results ] # Check for any more reductions return self . _reduce ( )
10,641
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/_parser.py#L75-L97
[ "def", "patch_requests", "(", ")", ":", "config", ".", "create_config_directory", "(", ")", "ca_certs_file", "=", "config", ".", "CERT_FILE", "ca_certs_contents", "=", "requests", ".", "__loader__", ".", "get_data", "(", "'requests/cacert.pem'", ")", "should_write_certs", "=", "True", "if", "os", ".", "path", ".", "isfile", "(", "ca_certs_file", ")", ":", "with", "open", "(", "ca_certs_file", ",", "'rb'", ")", "as", "f", ":", "existing_certs", "=", "f", ".", "read", "(", ")", "if", "existing_certs", "!=", "ca_certs_contents", ":", "should_write_certs", "=", "True", "print", "(", "\"Updating local SSL certificates\"", ")", "else", ":", "should_write_certs", "=", "False", "if", "should_write_certs", ":", "with", "open", "(", "ca_certs_file", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "ca_certs_contents", ")", "os", ".", "environ", "[", "'REQUESTS_CA_BUNDLE'", "]", "=", "ca_certs_file" ]
Parse a single base check rule into an appropriate Check object .
def _parse_check ( self , rule ) : # Handle the special constant-type checks for check_cls in ( checks . FalseCheck , checks . TrueCheck ) : check = check_cls ( ) if rule == str ( check ) : return check try : kind , match = rule . split ( ':' , 1 ) except Exception : if self . raise_error : raise InvalidRuleException ( rule ) else : LOG . exception ( 'Failed to understand rule %r' , rule ) # If the rule is invalid, we'll fail closed return checks . FalseCheck ( ) if kind in checks . registered_checks : return checks . registered_checks [ kind ] ( kind , match ) elif None in checks . registered_checks : return checks . registered_checks [ None ] ( kind , match ) elif self . raise_error : raise InvalidRuleException ( rule ) else : LOG . error ( 'No handler for matches of kind %r' , kind ) # If the rule is invalid, we'll fail closed return checks . FalseCheck ( )
10,642
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/_parser.py#L117-L145
[ "def", "set_nvidia_environment_variables", "(", "environment", ",", "gpu_ids", ")", ":", "if", "gpu_ids", ":", "nvidia_visible_devices", "=", "\"\"", "for", "gpu_id", "in", "gpu_ids", ":", "nvidia_visible_devices", "+=", "\"{},\"", ".", "format", "(", "gpu_id", ")", "environment", "[", "\"NVIDIA_VISIBLE_DEVICES\"", "]", "=", "nvidia_visible_devices" ]
Tokenizer for the policy language .
def _parse_tokenize ( self , rule ) : for token in self . _TOKENIZE_RE . split ( rule ) : # Skip empty tokens if not token or token . isspace ( ) : continue # Handle leading parens on the token clean = token . lstrip ( '(' ) for i in range ( len ( token ) - len ( clean ) ) : yield '(' , '(' # If it was only parentheses, continue if not clean : continue else : token = clean # Handle trailing parens on the token clean = token . rstrip ( ')' ) trail = len ( token ) - len ( clean ) # Yield the cleaned token lowered = clean . lower ( ) if lowered in ( 'and' , 'or' , 'not' ) : # Special tokens yield lowered , clean elif clean : # Not a special token, but not composed solely of ')' if len ( token ) >= 2 and ( ( token [ 0 ] , token [ - 1 ] ) in [ ( '"' , '"' ) , ( "'" , "'" ) ] ) : # It's a quoted string yield 'string' , token [ 1 : - 1 ] else : yield 'check' , self . _parse_check ( clean ) # Yield the trailing parens for i in range ( trail ) : yield ')' , ')'
10,643
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/_parser.py#L147-L186
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Parses policy to tree .
def parse ( self , rule : str ) : # Empty rule means always accept if not rule : return checks . TrueCheck ( ) for token , value in self . _parse_tokenize ( rule ) : self . _shift ( token , value ) try : return self . result except ValueError : LOG . exception ( 'Failed to understand rule %r' , rule ) # Fail closed return checks . FalseCheck ( )
10,644
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/_parser.py#L188-L207
[ "def", "remove_nones", "(", "*", "*", "kwargs", ")", ":", "return", "dict", "(", "(", "k", ",", "v", ")", "for", "k", ",", "v", "in", "kwargs", ".", "iteritems", "(", ")", "if", "v", "is", "not", "None", ")" ]
Modify the case A or B and C
def _mix_or_and_expr ( self , or_expr , _and , check ) : or_expr , check1 = or_expr . pop_check ( ) if isinstance ( check1 , checks . AndCheck ) : and_expr = check1 and_expr . add_check ( check ) else : and_expr = checks . AndCheck ( check1 , check ) return [ ( 'or_expr' , or_expr . add_check ( and_expr ) ) ]
10,645
https://github.com/garenchan/policy/blob/7709ae5f371146f8c90380d0877a5e59d731f644/policy/_parser.py#L226-L238
[ "def", "_ParseEntryObjectOffsets", "(", "self", ",", "file_object", ",", "file_offset", ")", ":", "entry_array_object", "=", "self", ".", "_ParseEntryArrayObject", "(", "file_object", ",", "file_offset", ")", "entry_object_offsets", "=", "list", "(", "entry_array_object", ".", "entry_object_offsets", ")", "while", "entry_array_object", ".", "next_entry_array_offset", "!=", "0", ":", "entry_array_object", "=", "self", ".", "_ParseEntryArrayObject", "(", "file_object", ",", "entry_array_object", ".", "next_entry_array_offset", ")", "entry_object_offsets", ".", "extend", "(", "entry_array_object", ".", "entry_object_offsets", ")", "return", "entry_object_offsets" ]
Update parser grammar to add a list of allowed keywords .
def build_valid_keywords_grammar ( keywords = None ) : from invenio_query_parser . parser import KeywordQuery , KeywordRule , NotKeywordValue , SimpleQuery , ValueQuery if keywords : KeywordRule . grammar = attr ( 'value' , re . compile ( r"(\d\d\d\w{{0,3}}|{0})\b" . format ( "|" . join ( keywords ) , re . I ) ) ) NotKeywordValue . grammar = attr ( 'value' , re . compile ( r'\b(?!\d\d\d\w{{0,3}}|{0}:)\S+\b:' . format ( ":|" . join ( keywords ) ) ) ) SimpleQuery . grammar = attr ( 'op' , [ NotKeywordValue , KeywordQuery , ValueQuery ] ) else : KeywordRule . grammar = attr ( 'value' , re . compile ( r"[\w\d]+(\.[\w\d]+)*" ) ) SimpleQuery . grammar = attr ( 'op' , [ KeywordQuery , ValueQuery ] )
10,646
https://github.com/inveniosoftware/invenio-query-parser/blob/21a2c36318003ff52d2e18e7196bb420db8ecb4b/invenio_query_parser/utils.py#L34-L51
[ "def", "setOverlayTransformOverlayRelative", "(", "self", ",", "ulOverlayHandle", ",", "ulOverlayHandleParent", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformOverlayRelative", "pmatParentOverlayToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "ulOverlayHandleParent", ",", "byref", "(", "pmatParentOverlayToOverlayTransform", ")", ")", "return", "result", ",", "pmatParentOverlayToOverlayTransform" ]
Renders the given element to string .
def render ( self , element ) : # Store the root node to provide some context to render functions if not self . root_node : self . root_node = element render_func = getattr ( self , self . _cls_to_func_name ( element . __class__ ) , None ) if not render_func : render_func = self . render_children return render_func ( element )
10,647
https://github.com/frostming/marko/blob/1cd030b665fa37bad1f8b3a25a89ce1a7c491dde/marko/renderer.py#L37-L50
[ "def", "results", "(", "self", ",", "trial_ids", ")", ":", "metadata_folder", "=", "os", ".", "path", ".", "join", "(", "self", ".", "log_dir", ",", "constants", ".", "METADATA_FOLDER", ")", "dfs", "=", "[", "]", "# TODO: various file-creation corner cases like the result file not", "# always existing if stuff is not logged and etc should be ironed out", "# (would probably be easier if we had a centralized Sync class which", "# relied on some formal remote store semantics).", "for", "trial_id", "in", "trial_ids", ":", "# TODO constants should just contain the recipes for filename", "# construction instead of this multi-file implicit constraint", "result_file", "=", "os", ".", "path", ".", "join", "(", "metadata_folder", ",", "trial_id", "+", "\"_\"", "+", "constants", ".", "RESULT_SUFFIX", ")", "assert", "os", ".", "path", ".", "isfile", "(", "result_file", ")", ",", "result_file", "dfs", ".", "append", "(", "pd", ".", "read_json", "(", "result_file", ",", "typ", "=", "'frame'", ",", "lines", "=", "True", ")", ")", "df", "=", "pd", ".", "concat", "(", "dfs", ",", "axis", "=", "0", ",", "ignore_index", "=", "True", ",", "sort", "=", "False", ")", "return", "df" ]
Recursively renders child elements . Joins the rendered strings with no space in between .
def render_children ( self , element ) : rendered = [ self . render ( child ) for child in element . children ] return '' . join ( rendered )
10,648
https://github.com/frostming/marko/blob/1cd030b665fa37bad1f8b3a25a89ce1a7c491dde/marko/renderer.py#L52-L65
[ "def", "create", "(", "cls", ",", "destination", ")", ":", "mdb_gz_b64", "=", "\"\"\"\\\n H4sICIenn1gC/25ldzIwMDMubWRiAO2de2wcRx3Hf7O7Pt/d3u6eLyEtVaOaqg+EkjQvuVVDwa9a\n jWXHdZxQQlCJ7fOrfp3OTpqkhVxTItFWIhVQVFBRVNIKRaColVpAUKGKRwwFqUAhKiBIpUaoVWP+\n qKgIIHL8Znb39u72znWJiWP3+9l473fzm/nNY3cdf2fmbBJEPdO9E+nebLq+fWC6vrWZOImen9D7\n 9sR+vPPNE0PZxo/TE5879mj+yNc3/OzAD2bXv3DmV9/o/8PZnxxr+/fDL2w79ulzN7e+/sS/zvzz\n w3+N1z28p3PTfQ3nfn/m2YmeFS2no89uWnvqwO5HUvd/5Phr938tes3j/zm5+qT41J8/P/iZx87/\n +qHrjgyduubG1t/+7eWB2XztTNuT+1clZt9c2/e7HRGizevWEwAAAAAAAACAhUEIwvE+PoRIO8K7\n FzT6obPPwTMBAAAAAAAAAABcfpzPXwya+Ispo1xlEO2KEEX9eaGyWnrqyKQ60tQ0AcNZRcR1RYuy\n +XZCxoqRzmaMI6cKGRJuJVrIEZUOQ9UrHStUYpyzKkdNmSPFDkM6aguhXMdVHCMuHXE2Suu4IFQJ\n l6CErNWUDouDlbdKOZIcrKLD4S5WdNhqIEodqlVaofKgVTHpiBQ6uLG0uaKsuYbf3IS8BmV1qFAm\n j1Z5Hbp06GWDKC+DTS00SRN8DFA/TXNfW6mXX3upj7+mOHWllzLAObN8du0gdSdlKO3ZcWqjMbaH\n uOQqtidViRF+P0HbOH2c3xm0lfMb1EH7uHZ5vp32c+ks+5PqfSeXS9NejjTAvZQpd7J3kuuJFqLE\n qYvuVa3Ocqk7OVXWNMFxZPRVtJ1zSXuCBrlkh+rjEF1Zlt5Dw6qN0xx5Bx3gGgbowVo56EIjkc9T\n xX9Jdd+5PKDOD6q3VQvwv7qiZ8st419cdYHlo6iuriF8X4HA590AsodXhvrsj0yMDPnAuI+ZvOrq\n 1o7K51Hdy7a8cdXNm5AedbfG5W3j3lOybxFZKb6zAgAAAAAAsNzQxAlbvnYJV3VcUU3/S2luBIKF\n ha+IlWp+wxW4IiRXRSXxKeNU1eOxUuUbSOIINbEM7WT506ZE3LASgCOeYJWCMcnCsI/u8eSsFEYR\n lnlbWa6+u0jTYqSkvuQL9G5CLFwTRBMAAAAAAAAAgMtW/79lyVdLKxW7oqDF3bXOniib0UD/m/xq\n loWqvFwt3DX/mrLNALIu3V35NkpK1JDmL+2XOmr9pf1gKiFY4I672wc0mveaf6zaenyKmljPT6t5\n hT7a6y13y0XqjFpwneJjRC0oRwvL3eUL2fHCcuyGIntjhTkDuZCd5Vc5j+HNUMyx+myYcpHW5YG5\n ZijUdbg2VFu4ZzzcHFM3seQLAAAAAAAAAMtc//9S6cm1emX97ytK1v81rHelhtfVfAFnseZXRdV9\n Ad7+dhGS5kbl3eqe/K8pU/nnYwX5X2VeoLbCZwHi7txD6aTELabnoLJ5AfPFC8JmFd3Pun+MlfM4\n q/846/4s62i5+8Dmc7EvSVN0UG2tL00p1uPXqZTt/G5QqX+5lbufz+mSctVzFce6upBrTG3Fd+cn\n pmiYrUyw8+GNfL4hn8/k83qZrVlyGzgPeqbhjcOqx7KMEZRpU/MPQ+rsldEtuYm8vExkznoMS+6b\n KC5TZRt8wVf4xEkFX4V5D/X2vYz1/EcR8yMAAAAAAACAJY0Qf/d3vLPUlb//b4Nzzv6W3Wevtl+1\n vmxts2LWTxOHErcm3jGfMUfNG0yMGQAAAAAAeJ/8rLwAMXIYRgCARFv8IIaYtKpGqCdqlN/2kupD\n 
/ob67qXhsi0lDh2Vp6728faO9tHuUflfWJ1wE0e6724f35XuG71r16Dr0FwH573by6rKi0N7RveN\n tnd6aTVBWrpjd3fnuJtsBMnDk90ju7zckSA5XGGtdGrK2dWhUnRcMgAAAAAAAAD4v2CIV6vqf82I\n Jusbcwsy7wkWSf/n1JQNq/Oc+uQGq/ecmsphYZ6Tn6XwRLjwxb7mTxDoakLgURUFshwAAAAAAAAA\n ljpCrHZ8W/f2/2NUAAAAAAAAAAAAhXH5RLm4IIbotqot7hbW/0MGWCp46/+pgpHwjZS3IyAlfMPy\n tgakNN+wfcPxNgukdN9I+kadt30gZfhGjW+s8I2V3s6CVNTbWZCK+Eatb3zAN1Z5mw5SMd+I+wZ+\n +QQAAAAAAAAA/K8IcdT27Zqi3/+HkQEAAAAAAAAAsGgkMQQLjSHqbQPDAAAAAAAAAAAALGuw/g8A\n AAAAAAAA4DJUqwsQI7cQDWlcLiMq1/9rcGMBAAAAAAAAAADLGuh/AAAAAAAAAAAA+h8AAAAAAAAA\n AABLHyHusDTPjtLzTtoxnRftUftqe8YatDA+AAAAAAAAAPDeqJN/KVt+et0R9PYnzz7W8PrZRv+V\n HblO6qEDNEXbaYDGqJemaYQmaYJThtnK8Gvzb1opfDRTPZmUlxUY86qgm/ZyFVkOOqCC3kLhoyEI\n qs8raBO10O0q3EYKH+uDcNq8wnVRH93D7evnYZhHG5kkB3a0OYO2ctCWV9ZR+FhT0l2HCzl6xVBz\n XZyPUvi4taTjcwRuVUF7uYW9HMy9MJspfGwMAoo5A+5Qwca8UHN2WogeU/fu0ito1vmjM+M85zzp\n fNG5zxl2djrNzk3O9+0m+yWrx2q0fpH4buJ4Yk3ig4lvmkfxx9gBAAAAAAC4OAylQfJ5h5pfSVCc\n f853gqSmWPSZux6xjUznltH2HT/flNu7++0NZ7/07cg/vnPbVu30y6d/NLvlabPh+j81v/Xc5g9l\n 1h2f+epn9+VPdN90OHHvU50fm94y/ZXvWQ/tP/yJG/NH3llz8A79tlNPG72DHSePHdzz2s3XPzVj\n vzSUvSHjVys1Rv5CSUv8pEvcEqkbV/KX35JaQ+npikmRS9o4rtYIt8RYnJa4Ou6SV6stTm+l7rcX\n q9qSy+23pCVIcgV/SZKuJj5CSRc4Y/PpkiesLJcI53J37NvFuQzv4peGL0/SypP+C+45xVAAMAEA\n \"\"\"", "pristine", "=", "StringIO", "(", ")", "pristine", ".", "write", "(", "base64", ".", "b64decode", "(", "mdb_gz_b64", ")", ")", "pristine", ".", "seek", "(", "0", ")", "pristine", "=", "gzip", ".", "GzipFile", "(", "fileobj", "=", "pristine", ",", "mode", "=", "'rb'", ")", "with", "open", "(", "destination", ",", "'wb'", ")", "as", "handle", ":", "shutil", ".", "copyfileobj", "(", "pristine", ",", "handle", ")", "return", "cls", "(", "destination", ")" ]
Adds the groups assigned to this user to a groups field .
def setGroups ( self , * args , * * kwargs ) : try : groups = self . mambugroupsclass ( creditOfficerUsername = self [ 'username' ] , * args , * * kwargs ) except AttributeError as ae : from . mambugroup import MambuGroups self . mambugroupsclass = MambuGroups groups = self . mambugroupsclass ( creditOfficerUsername = self [ 'username' ] , * args , * * kwargs ) self [ 'groups' ] = groups return 1
10,649
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuuser.py#L60-L73
[ "def", "remove_thin_device", "(", "name", ",", "force", "=", "False", ")", ":", "cmd", "=", "[", "'dmsetup'", ",", "'remove'", ",", "'--retry'", ",", "name", "]", "r", "=", "util", ".", "subp", "(", "cmd", ")", "if", "not", "force", ":", "if", "r", ".", "return_code", "!=", "0", ":", "raise", "MountError", "(", "'Could not remove thin device:\\n%s'", "%", "r", ".", "stderr", ".", "decode", "(", "sys", ".", "getdefaultencoding", "(", ")", ")", ".", "split", "(", "\"\\n\"", ")", "[", "0", "]", ")" ]
Adds the role assigned to this user to a role field .
def setRoles ( self , * args , * * kwargs ) : try : role = self . mamburoleclass ( entid = self [ 'role' ] [ 'encodedKey' ] , * args , * * kwargs ) except KeyError : return 0 except AttributeError as ae : from . mamburoles import MambuRole self . mamburoleclass = MambuRole try : role = self . mamburoleclass ( entid = self [ 'role' ] [ 'encodedKey' ] , * args , * * kwargs ) except KeyError : return 0 self [ 'role' ] [ 'role' ] = role return 1
10,650
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuuser.py#L76-L97
[ "def", "get_or_generate_vocabulary", "(", "data_dir", ",", "tmp_dir", ",", "data_prefix", ",", "max_page_size_exp", ",", "approx_vocab_size", "=", "32768", ",", "strip", "=", "True", ")", ":", "num_pages_for_vocab_generation", "=", "approx_vocab_size", "//", "3", "vocab_file", "=", "vocab_filename", "(", "approx_vocab_size", ",", "strip", ")", "def", "my_generator", "(", "data_prefix", ")", ":", "\"\"\"Line generator for vocab.\"\"\"", "count", "=", "0", "for", "page", "in", "corpus_page_generator", "(", "all_corpus_files", "(", "data_prefix", ")", "[", ":", ":", "-", "1", "]", ",", "tmp_dir", ",", "max_page_size_exp", ")", ":", "revisions", "=", "page", "[", "\"revisions\"", "]", "if", "revisions", ":", "text", "=", "get_text", "(", "revisions", "[", "-", "1", "]", ",", "strip", "=", "strip", ")", "yield", "text", "count", "+=", "1", "if", "count", "%", "100", "==", "0", ":", "tf", ".", "logging", ".", "info", "(", "\"reading pages for vocab %d\"", "%", "count", ")", "if", "count", ">", "num_pages_for_vocab_generation", ":", "break", "return", "generator_utils", ".", "get_or_generate_vocab_inner", "(", "data_dir", ",", "vocab_file", ",", "approx_vocab_size", ",", "my_generator", "(", "data_prefix", ")", ")" ]
Creates an user in Mambu
def create ( self , data , * args , * * kwargs ) : super ( MambuUser , self ) . create ( data ) self [ 'user' ] [ self . customFieldName ] = self [ 'customInformation' ] self . init ( attrs = self [ 'user' ] )
10,651
https://github.com/jstitch/MambuPy/blob/2af98cc12e7ed5ec183b3e97644e880e70b79ee8/MambuPy/rest/mambuuser.py#L100-L109
[ "def", "set_column_count", "(", "self", ",", "count", ")", ":", "current_row_count", "=", "self", ".", "row_count", "(", ")", "current_column_count", "=", "self", ".", "column_count", "(", ")", "if", "count", ">", "current_column_count", ":", "cl", "=", "TableEditableItem", "if", "self", ".", "_editable", "else", "TableItem", "for", "r_key", "in", "self", ".", "children", ".", "keys", "(", ")", ":", "row", "=", "self", ".", "children", "[", "r_key", "]", "for", "i", "in", "range", "(", "current_column_count", ",", "count", ")", ":", "row", ".", "append", "(", "cl", "(", ")", ",", "str", "(", "i", ")", ")", "if", "self", ".", "_editable", ":", "row", ".", "children", "[", "str", "(", "i", ")", "]", ".", "onchange", ".", "connect", "(", "self", ".", "on_item_changed", ",", "int", "(", "r_key", ")", ",", "int", "(", "i", ")", ")", "self", ".", "_update_first_row", "(", ")", "elif", "count", "<", "current_column_count", ":", "for", "row", "in", "self", ".", "children", ".", "values", "(", ")", ":", "for", "i", "in", "range", "(", "count", ",", "current_column_count", ")", ":", "row", ".", "remove_child", "(", "row", ".", "children", "[", "str", "(", "i", ")", "]", ")", "self", ".", "_column_count", "=", "count" ]
Write the bipartite attribute graph to a file .
def write_attribute_adj_list ( self , path ) : att_mappings = self . get_attribute_mappings ( ) with open ( path , mode = "w" ) as file : for k , v in att_mappings . items ( ) : print ( "{} {}" . format ( k , " " . join ( str ( e ) for e in v ) ) , file = file )
10,652
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/attribute_network.py#L25-L34
[ "def", "restart", "(", "self", ",", "timeout", "=", "None", ")", ":", "msg", "=", "{", "\"value\"", ":", "\"Restart requested by \"", "+", "self", ".", "username", "+", "\"via the Splunk SDK for Python\"", "}", "# This message will be deleted once the server actually restarts.", "self", ".", "messages", ".", "create", "(", "name", "=", "\"restart_required\"", ",", "*", "*", "msg", ")", "result", "=", "self", ".", "post", "(", "\"server/control/restart\"", ")", "if", "timeout", "is", "None", ":", "return", "result", "start", "=", "datetime", ".", "now", "(", ")", "diff", "=", "timedelta", "(", "seconds", "=", "timeout", ")", "while", "datetime", ".", "now", "(", ")", "-", "start", "<", "diff", ":", "try", ":", "self", ".", "login", "(", ")", "if", "not", "self", ".", "restart_required", ":", "return", "result", "except", "Exception", "as", "e", ":", "sleep", "(", "1", ")", "raise", "Exception", "(", "\"Operation time out.\"", ")" ]
Get a dictionary of mappings between vertices and enumerated attributes .
def get_attribute_mappings ( self ) : att_ind_start = len ( self . graph . vs ) att_mappings = defaultdict ( list ) att_ind_end = self . _add_differential_expression_attributes ( att_ind_start , att_mappings ) if "associated_diseases" in self . graph . vs . attributes ( ) : self . _add_disease_association_attributes ( att_ind_end , att_mappings ) return att_mappings
10,653
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/attribute_network.py#L36-L46
[ "def", "mock_xray_client", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "_wrapped", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "print", "(", "\"Starting X-Ray Patch\"", ")", "old_xray_context_var", "=", "os", ".", "environ", ".", "get", "(", "'AWS_XRAY_CONTEXT_MISSING'", ")", "os", ".", "environ", "[", "'AWS_XRAY_CONTEXT_MISSING'", "]", "=", "'LOG_ERROR'", "old_xray_context", "=", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_context", "old_xray_emitter", "=", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_emitter", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_context", "=", "AWSContext", "(", ")", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_emitter", "=", "MockEmitter", "(", ")", "try", ":", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "if", "old_xray_context_var", "is", "None", ":", "del", "os", ".", "environ", "[", "'AWS_XRAY_CONTEXT_MISSING'", "]", "else", ":", "os", ".", "environ", "[", "'AWS_XRAY_CONTEXT_MISSING'", "]", "=", "old_xray_context_var", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_emitter", "=", "old_xray_emitter", "aws_xray_sdk", ".", "core", ".", "xray_recorder", ".", "_context", "=", "old_xray_context", "return", "_wrapped" ]
Add differential expression information to the attribute mapping dictionary .
def _add_differential_expression_attributes ( self , att_ind_start , att_mappings ) : up_regulated_ind = self . graph . vs . select ( up_regulated_eq = True ) . indices down_regulated_ind = self . graph . vs . select ( down_regulated_eq = True ) . indices rest_ind = self . graph . vs . select ( diff_expressed_eq = False ) . indices self . _add_attribute_values ( att_ind_start + 1 , att_mappings , up_regulated_ind ) self . _add_attribute_values ( att_ind_start + 2 , att_mappings , down_regulated_ind ) self . _add_attribute_values ( att_ind_start + 3 , att_mappings , rest_ind ) return att_ind_start + 4
10,654
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/attribute_network.py#L48-L62
[ "def", "cycle_file", "(", "source_plaintext_filename", ")", ":", "# Create a static random master key provider", "key_id", "=", "os", ".", "urandom", "(", "8", ")", "master_key_provider", "=", "StaticRandomMasterKeyProvider", "(", ")", "master_key_provider", ".", "add_master_key", "(", "key_id", ")", "ciphertext_filename", "=", "source_plaintext_filename", "+", "\".encrypted\"", "cycled_plaintext_filename", "=", "source_plaintext_filename", "+", "\".decrypted\"", "# Encrypt the plaintext source data", "with", "open", "(", "source_plaintext_filename", ",", "\"rb\"", ")", "as", "plaintext", ",", "open", "(", "ciphertext_filename", ",", "\"wb\"", ")", "as", "ciphertext", ":", "with", "aws_encryption_sdk", ".", "stream", "(", "mode", "=", "\"e\"", ",", "source", "=", "plaintext", ",", "key_provider", "=", "master_key_provider", ")", "as", "encryptor", ":", "for", "chunk", "in", "encryptor", ":", "ciphertext", ".", "write", "(", "chunk", ")", "# Decrypt the ciphertext", "with", "open", "(", "ciphertext_filename", ",", "\"rb\"", ")", "as", "ciphertext", ",", "open", "(", "cycled_plaintext_filename", ",", "\"wb\"", ")", "as", "plaintext", ":", "with", "aws_encryption_sdk", ".", "stream", "(", "mode", "=", "\"d\"", ",", "source", "=", "ciphertext", ",", "key_provider", "=", "master_key_provider", ")", "as", "decryptor", ":", "for", "chunk", "in", "decryptor", ":", "plaintext", ".", "write", "(", "chunk", ")", "# Verify that the \"cycled\" (encrypted, then decrypted) plaintext is identical to the source", "# plaintext", "assert", "filecmp", ".", "cmp", "(", "source_plaintext_filename", ",", "cycled_plaintext_filename", ")", "# Verify that the encryption context used in the decrypt operation includes all key pairs from", "# the encrypt operation", "#", "# In production, always use a meaningful encryption context. 
In this sample, we omit the", "# encryption context (no key pairs).", "assert", "all", "(", "pair", "in", "decryptor", ".", "header", ".", "encryption_context", ".", "items", "(", ")", "for", "pair", "in", "encryptor", ".", "header", ".", "encryption_context", ".", "items", "(", ")", ")", "return", "ciphertext_filename", ",", "cycled_plaintext_filename" ]
Add an attribute value to the given vertices .
def _add_attribute_values ( self , value , att_mappings , indices ) : for i in indices : att_mappings [ i ] . append ( value )
10,655
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/attribute_network.py#L64-L72
[ "def", "get_partition", "(", "url", ",", "headers", ",", "source_id", ",", "container", ",", "partition", ")", ":", "accepted_formats", "=", "list", "(", "serializer", ".", "format_registry", ".", "keys", "(", ")", ")", "accepted_compression", "=", "list", "(", "serializer", ".", "compression_registry", ".", "keys", "(", ")", ")", "payload", "=", "dict", "(", "action", "=", "'read'", ",", "source_id", "=", "source_id", ",", "accepted_formats", "=", "accepted_formats", ",", "accepted_compression", "=", "accepted_compression", ")", "if", "partition", "is", "not", "None", ":", "payload", "[", "'partition'", "]", "=", "partition", "try", ":", "resp", "=", "requests", ".", "post", "(", "urljoin", "(", "url", ",", "'/v1/source'", ")", ",", "data", "=", "msgpack", ".", "packb", "(", "payload", ",", "use_bin_type", "=", "True", ")", ",", "*", "*", "headers", ")", "if", "resp", ".", "status_code", "!=", "200", ":", "raise", "Exception", "(", "'Error reading data'", ")", "msg", "=", "msgpack", ".", "unpackb", "(", "resp", ".", "content", ",", "*", "*", "unpack_kwargs", ")", "format", "=", "msg", "[", "'format'", "]", "compression", "=", "msg", "[", "'compression'", "]", "compressor", "=", "serializer", ".", "compression_registry", "[", "compression", "]", "encoder", "=", "serializer", ".", "format_registry", "[", "format", "]", "chunk", "=", "encoder", ".", "decode", "(", "compressor", ".", "decompress", "(", "msg", "[", "'data'", "]", ")", ",", "container", ")", "return", "chunk", "finally", ":", "if", "resp", "is", "not", "None", ":", "resp", ".", "close", "(", ")" ]
Add disease association information to the attribute mapping dictionary .
def _add_disease_association_attributes ( self , att_ind_start , att_mappings ) : disease_mappings = self . get_disease_mappings ( att_ind_start ) for vertex in self . graph . vs : assoc_diseases = vertex [ "associated_diseases" ] if assoc_diseases is not None : assoc_disease_ids = [ disease_mappings [ disease ] for disease in assoc_diseases ] att_mappings [ vertex . index ] . extend ( assoc_disease_ids )
10,656
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/attribute_network.py#L74-L85
[ "def", "_get_license_description", "(", "license_code", ")", ":", "req", "=", "requests", ".", "get", "(", "\"{base_url}/licenses/{license_code}\"", ".", "format", "(", "base_url", "=", "BASE_URL", ",", "license_code", "=", "license_code", ")", ",", "headers", "=", "_HEADERS", ")", "if", "req", ".", "status_code", "==", "requests", ".", "codes", ".", "ok", ":", "s", "=", "req", ".", "json", "(", ")", "[", "\"body\"", "]", "search_curly", "=", "re", ".", "search", "(", "r'\\{(.*)\\}'", ",", "s", ")", "search_square", "=", "re", ".", "search", "(", "r'\\[(.*)\\]'", ",", "s", ")", "license", "=", "\"\"", "replace_string", "=", "'{year} {name}'", ".", "format", "(", "year", "=", "date", ".", "today", "(", ")", ".", "year", ",", "name", "=", "_get_config_name", "(", ")", ")", "if", "search_curly", ":", "license", "=", "re", ".", "sub", "(", "r'\\{(.+)\\}'", ",", "replace_string", ",", "s", ")", "elif", "search_square", ":", "license", "=", "re", ".", "sub", "(", "r'\\[(.+)\\]'", ",", "replace_string", ",", "s", ")", "else", ":", "license", "=", "s", "return", "license", "else", ":", "print", "(", "Fore", ".", "RED", "+", "'No such license. Please check again.'", ")", ",", "print", "(", "Style", ".", "RESET_ALL", ")", ",", "sys", ".", "exit", "(", ")" ]
Get a dictionary of enumerations for diseases .
def get_disease_mappings ( self , att_ind_start ) : all_disease_ids = self . get_all_unique_diseases ( ) disease_enum = enumerate ( all_disease_ids , start = att_ind_start ) disease_mappings = { } for num , dis in disease_enum : disease_mappings [ dis ] = num return disease_mappings
10,657
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/attribute_network.py#L87-L98
[ "async", "def", "postprocess_websocket", "(", "self", ",", "response", ":", "Optional", "[", "Response", "]", ",", "websocket_context", ":", "Optional", "[", "WebsocketContext", "]", "=", "None", ",", ")", "->", "Response", ":", "websocket_", "=", "(", "websocket_context", "or", "_websocket_ctx_stack", ".", "top", ")", ".", "websocket", "functions", "=", "(", "websocket_context", "or", "_websocket_ctx_stack", ".", "top", ")", ".", "_after_websocket_functions", "blueprint", "=", "websocket_", ".", "blueprint", "if", "blueprint", "is", "not", "None", ":", "functions", "=", "chain", "(", "functions", ",", "self", ".", "after_websocket_funcs", "[", "blueprint", "]", ")", "functions", "=", "chain", "(", "functions", ",", "self", ".", "after_websocket_funcs", "[", "None", "]", ")", "for", "function", "in", "functions", ":", "response", "=", "await", "function", "(", "response", ")", "session_", "=", "(", "websocket_context", "or", "_request_ctx_stack", ".", "top", ")", ".", "session", "if", "not", "self", ".", "session_interface", ".", "is_null_session", "(", "session_", ")", ":", "if", "response", "is", "None", "and", "isinstance", "(", "session_", ",", "SecureCookieSession", ")", "and", "session_", ".", "modified", ":", "self", ".", "logger", ".", "exception", "(", "\"Secure Cookie Session modified during websocket handling. \"", "\"These modifications will be lost as a cookie cannot be set.\"", ")", "else", ":", "await", "self", ".", "save_session", "(", "session_", ",", "response", ")", "return", "response" ]
Get all unique diseases that are known to the network .
def get_all_unique_diseases ( self ) : all_disease_ids = self . graph . vs [ "associated_diseases" ] # remove None values from list all_disease_ids = [ lst for lst in all_disease_ids if lst is not None ] # flatten list of lists, get unique elements all_disease_ids = list ( set ( [ id for sublist in all_disease_ids for id in sublist ] ) ) return all_disease_ids
10,658
https://github.com/GuiltyTargets/ppi-network-annotation/blob/4d7b6713485f2d0a0957e6457edc1b1b5a237460/src/ppi_network_annotation/model/attribute_network.py#L100-L110
[ "def", "use_categories_as_metadata_and_replace_terms", "(", "self", ")", ":", "new_metadata_factory", "=", "CSRMatrixFactory", "(", ")", "for", "i", ",", "category_idx", "in", "enumerate", "(", "self", ".", "get_category_ids", "(", ")", ")", ":", "new_metadata_factory", "[", "i", ",", "category_idx", "]", "=", "1", "new_metadata", "=", "new_metadata_factory", ".", "get_csr_matrix", "(", ")", "new_tdm", "=", "self", ".", "_make_new_term_doc_matrix", "(", "self", ".", "_mX", ",", "new_metadata", ",", "self", ".", "_y", ",", "self", ".", "_metadata_idx_store", ",", "self", ".", "_category_idx_store", ",", "copy", "(", "self", ".", "_category_idx_store", ")", ",", "self", ".", "_y", "==", "self", ".", "_y", ")", "return", "new_tdm" ]
Page view decorator .
def page_view ( url ) : def decorator ( func ) : @ wraps ( func ) async def wrapper ( self : BaseState , * args , * * kwargs ) : user_id = self . request . user . id try : user_lang = await self . request . user . get_locale ( ) except NotImplementedError : user_lang = '' title = self . __class__ . __name__ # noinspection PyTypeChecker async for p in providers ( ) : await p . page_view ( url , title , user_id , user_lang ) return await func ( self , * args , * * kwargs ) return wrapper return decorator
10,659
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/analytics/_helpers.py#L14-L42
[ "def", "get_stores_secrets_volumes", "(", "cls", ",", "stores_secrets", ")", ":", "volumes", "=", "[", "]", "volume_mounts", "=", "[", "]", "for", "store_secret", "in", "stores_secrets", ":", "store", "=", "store_secret", "[", "'store'", "]", "if", "store", "in", "{", "GCS", ",", "S3", "}", ":", "secrets_volumes", ",", "secrets_volume_mounts", "=", "get_volume_from_secret", "(", "volume_name", "=", "cls", ".", "STORE_SECRET_VOLUME_NAME", ".", "format", "(", "store", ")", ",", "mount_path", "=", "cls", ".", "STORE_SECRET_KEY_MOUNT_PATH", ".", "format", "(", "store", ")", ",", "secret_name", "=", "store_secret", "[", "'persistence_secret'", "]", ",", ")", "volumes", "+=", "secrets_volumes", "volume_mounts", "+=", "secrets_volume_mounts", "return", "volumes", ",", "volume_mounts" ]
Parses the COBOL - converts the COBOL line into a dictionary containing the information - parses the pic information into type length precision - ~~handles redefines~~ - > our implementation does not do that anymore because we want to display item that was redefined .
def parse_cobol ( lines ) : output = [ ] intify = [ "level" , "occurs" ] # All in 1 line now, let's parse for row in lines : match = CobolPatterns . row_pattern . match ( row . strip ( ) ) if not match : _logger ( ) . warning ( "Found unmatched row %s" % row . strip ( ) ) continue match = match . groupdict ( ) for i in intify : match [ i ] = int ( match [ i ] ) if match [ i ] is not None else None if match [ 'pic' ] is not None : match [ 'pic_info' ] = parse_pic_string ( match [ 'pic' ] ) output . append ( match ) return output
10,660
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/api/parsers/pic.py#L114-L143
[ "def", "acquire_writer", "(", "self", ")", ":", "with", "self", ".", "mutex", ":", "while", "self", ".", "rwlock", "!=", "0", ":", "self", ".", "_writer_wait", "(", ")", "self", ".", "rwlock", "=", "-", "1" ]
Clean the names .
def clean_names ( lines , ensure_unique_names = False , strip_prefix = False , make_database_safe = False ) : names = { } for row in lines : if strip_prefix : row [ 'name' ] = row [ 'name' ] [ row [ 'name' ] . find ( '-' ) + 1 : ] if row [ 'indexed_by' ] is not None : row [ 'indexed_by' ] = row [ 'indexed_by' ] [ row [ 'indexed_by' ] . find ( '-' ) + 1 : ] if ensure_unique_names : i = 1 while ( row [ 'name' ] if i == 1 else row [ 'name' ] + "-" + str ( i ) ) in names : i += 1 names [ row [ 'name' ] if i == 1 else row [ 'name' ] + "-" + str ( i ) ] = 1 if i > 1 : row [ 'name' ] = row [ 'name' ] + "-" + str ( i ) if make_database_safe : row [ 'name' ] = row [ 'name' ] . replace ( "-" , "_" ) return lines
10,661
https://github.com/pyQode/pyqode.cobol/blob/eedae4e320a4b2d0c44abb2c3061091321648fb7/pyqode/cobol/api/parsers/pic.py#L230-L263
[ "def", "OnAdjustVolume", "(", "self", ",", "event", ")", ":", "self", ".", "volume", "=", "self", ".", "player", ".", "audio_get_volume", "(", ")", "if", "event", ".", "GetWheelRotation", "(", ")", "<", "0", ":", "self", ".", "volume", "=", "max", "(", "0", ",", "self", ".", "volume", "-", "10", ")", "elif", "event", ".", "GetWheelRotation", "(", ")", ">", "0", ":", "self", ".", "volume", "=", "min", "(", "200", ",", "self", ".", "volume", "+", "10", ")", "self", ".", "player", ".", "audio_set_volume", "(", "self", ".", "volume", ")" ]
Return an application instance created from YAML .
def create_app_from_yml ( path ) : try : with open ( path , "rt" , encoding = "UTF-8" ) as f : try : # Substitute ALL occurrences of '%(here)s' with a path to a # directory with '.holocron.yml'. Please note, we also want # wrap the result into 'io.StringIO' in order to preserve # original filename in 'yaml.safe_load()' errors. interpolated = io . StringIO ( f . read ( ) % { "here" : os . path . abspath ( os . path . dirname ( path ) ) } ) interpolated . name = f . name conf = yaml . safe_load ( interpolated ) except yaml . YAMLError as exc : raise RuntimeError ( "Cannot parse a configuration file. Context: " + str ( exc ) ) except FileNotFoundError : conf = { "metadata" : None , "pipes" : { } } return core . create_app ( conf [ "metadata" ] , pipes = conf [ "pipes" ] )
10,662
https://github.com/ikalnytskyi/holocron/blob/d202f6bccfeca64162857c6d0ee5bb53e773d2f2/src/holocron/__main__.py#L18-L40
[ "def", "wait", "(", "timeout", ":", "Optional", "[", "float", "]", "=", "None", ")", "->", "Iterator", "[", "Any", "]", ":", "if", "timeout", "is", "not", "None", ":", "tcod", ".", "lib", ".", "SDL_WaitEventTimeout", "(", "tcod", ".", "ffi", ".", "NULL", ",", "int", "(", "timeout", "*", "1000", ")", ")", "else", ":", "tcod", ".", "lib", ".", "SDL_WaitEvent", "(", "tcod", ".", "ffi", ".", "NULL", ")", "return", "get", "(", ")" ]
Configure a root logger to print records in pretty format .
def configure_logger ( level ) : class _Formatter ( logging . Formatter ) : def format ( self , record ) : record . levelname = record . levelname [ : 4 ] return super ( _Formatter , self ) . format ( record ) # create stream handler with custom formatter stream_handler = logging . StreamHandler ( ) stream_handler . setFormatter ( _Formatter ( "[%(levelname)s] %(message)s" ) ) # configure root logger logger = logging . getLogger ( ) logger . addHandler ( stream_handler ) logger . setLevel ( level ) # capture warnings issued by 'warnings' module logging . captureWarnings ( True )
10,663
https://github.com/ikalnytskyi/holocron/blob/d202f6bccfeca64162857c6d0ee5bb53e773d2f2/src/holocron/__main__.py#L43-L73
[ "def", "wait_until_internet", "(", "time_between_attempts", "=", "3", ",", "max_attempts", "=", "10", ")", ":", "counter", "=", "0", "while", "not", "is_internet_on", "(", ")", ":", "time", ".", "sleep", "(", "time_between_attempts", ")", "# wait until internet is on", "counter", "+=", "1", "if", "counter", ">", "max_attempts", ":", "return", "False", "return", "True" ]
Builds a command line interface and parses its arguments . Returns an object with attributes that are represent CLI arguments .
def parse_command_line ( args ) : parser = argparse . ArgumentParser ( description = ( "Holocron is an easy and lightweight static blog generator, " "based on markup text and Jinja2 templates." ) , epilog = ( "With no CONF, read .holocron.yml in the current working dir. " "If no CONF found, the default settings will be used." ) ) parser . add_argument ( "-c" , "--conf" , dest = "conf" , default = ".holocron.yml" , help = "set path to the settings file" ) parser . add_argument ( "-q" , "--quiet" , dest = "verbosity" , action = "store_const" , const = logging . CRITICAL , help = "show only critical errors" ) parser . add_argument ( "-v" , "--verbose" , dest = "verbosity" , action = "store_const" , const = logging . INFO , help = "show additional messages" ) parser . add_argument ( "-d" , "--debug" , dest = "verbosity" , action = "store_const" , const = logging . DEBUG , help = "show all messages" ) parser . add_argument ( "--version" , action = "version" , version = pkg_resources . get_distribution ( "holocron" ) . version , help = "show the holocron version and exit" ) command_parser = parser . add_subparsers ( dest = "command" , help = "command to execute" ) run_parser = command_parser . add_parser ( "run" ) run_parser . add_argument ( "pipe" , help = "a pipe to run" ) # parse cli and form arguments object arguments = parser . parse_args ( args ) # if no commands are specified display help if arguments . command is None : parser . print_help ( ) parser . exit ( 1 ) return arguments
10,664
https://github.com/ikalnytskyi/holocron/blob/d202f6bccfeca64162857c6d0ee5bb53e773d2f2/src/holocron/__main__.py#L76-L127
[ "def", "queries", "(", "self", ",", "request", ")", ":", "queries", "=", "self", ".", "get_queries", "(", "request", ")", "worlds", "=", "[", "]", "with", "self", ".", "mapper", ".", "begin", "(", ")", "as", "session", ":", "for", "_", "in", "range", "(", "queries", ")", ":", "world", "=", "session", ".", "query", "(", "World", ")", ".", "get", "(", "randint", "(", "1", ",", "MAXINT", ")", ")", "worlds", ".", "append", "(", "self", ".", "get_json", "(", "world", ")", ")", "return", "Json", "(", "worlds", ")", ".", "http_response", "(", "request", ")" ]
If we re going through a syntax error add the directory of the error to the watchlist .
def _list_syntax_error ( ) : _ , e , _ = sys . exc_info ( ) if isinstance ( e , SyntaxError ) and hasattr ( e , 'filename' ) : yield path . dirname ( e . filename )
10,665
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/cli/_live_reload.py#L51-L59
[ "def", "show_partitioning", "(", "rdd", ",", "show", "=", "True", ")", ":", "if", "show", ":", "partitionCount", "=", "rdd", ".", "getNumPartitions", "(", ")", "try", ":", "valueCount", "=", "rdd", ".", "countApprox", "(", "1000", ",", "confidence", "=", "0.50", ")", "except", ":", "valueCount", "=", "-", "1", "try", ":", "name", "=", "rdd", ".", "name", "(", ")", "or", "None", "except", ":", "pass", "name", "=", "name", "or", "\"anonymous\"", "logging", ".", "info", "(", "\"For RDD %s, there are %d partitions with on average %s values\"", "%", "(", "name", ",", "partitionCount", ",", "int", "(", "valueCount", "/", "float", "(", "partitionCount", ")", ")", ")", ")" ]
List all directories known to hold project code .
def list_dirs ( ) : out = set ( ) out . update ( _list_config_dirs ( ) ) out . update ( _list_module_dirs ( ) ) out . update ( _list_syntax_error ( ) ) return out
10,666
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/cli/_live_reload.py#L62-L71
[ "async", "def", "register", "(", "self", ",", "request", ")", ":", "session", "=", "await", "get_session", "(", "request", ")", "user_id", "=", "session", ".", "get", "(", "'user_id'", ")", "if", "user_id", ":", "return", "redirect", "(", "request", ",", "'timeline'", ")", "error", "=", "None", "form", "=", "None", "if", "request", ".", "method", "==", "'POST'", ":", "form", "=", "await", "request", ".", "post", "(", ")", "user_id", "=", "await", "db", ".", "get_user_id", "(", "self", ".", "mongo", ".", "user", ",", "form", "[", "'username'", "]", ")", "if", "not", "form", "[", "'username'", "]", ":", "error", "=", "'You have to enter a username'", "elif", "not", "form", "[", "'email'", "]", "or", "'@'", "not", "in", "form", "[", "'email'", "]", ":", "error", "=", "'You have to enter a valid email address'", "elif", "not", "form", "[", "'password'", "]", ":", "error", "=", "'You have to enter a password'", "elif", "form", "[", "'password'", "]", "!=", "form", "[", "'password2'", "]", ":", "error", "=", "'The two passwords do not match'", "elif", "user_id", "is", "not", "None", ":", "error", "=", "'The username is already taken'", "else", ":", "await", "self", ".", "mongo", ".", "user", ".", "insert", "(", "{", "'username'", ":", "form", "[", "'username'", "]", ",", "'email'", ":", "form", "[", "'email'", "]", ",", "'pw_hash'", ":", "generate_password_hash", "(", "form", "[", "'password'", "]", ")", "}", ")", "return", "redirect", "(", "request", ",", "'login'", ")", "return", "{", "\"error\"", ":", "error", ",", "\"form\"", ":", "form", "}" ]
Start the child process that will look for changes in modules .
async def start_child ( ) : logger . info ( 'Started to watch for code changes' ) loop = asyncio . get_event_loop ( ) watcher = aionotify . Watcher ( ) flags = ( aionotify . Flags . MODIFY | aionotify . Flags . DELETE | aionotify . Flags . ATTRIB | aionotify . Flags . MOVED_TO | aionotify . Flags . MOVED_FROM | aionotify . Flags . CREATE | aionotify . Flags . DELETE_SELF | aionotify . Flags . MOVE_SELF ) watched_dirs = list_dirs ( ) for dir_name in watched_dirs : watcher . watch ( path = dir_name , flags = flags ) await watcher . setup ( loop ) while True : evt = await watcher . get_event ( ) file_path = path . join ( evt . alias , evt . name ) if file_path in watched_dirs or file_path . endswith ( '.py' ) : await asyncio . sleep ( settings . CODE_RELOAD_DEBOUNCE ) break watcher . close ( ) exit_for_reload ( )
10,667
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/cli/_live_reload.py#L83-L120
[ "def", "setAltInterface", "(", "self", ",", "alternate", ")", ":", "if", "isinstance", "(", "alternate", ",", "Interface", ")", ":", "alternate", "=", "alternate", ".", "alternateSetting", "self", ".", "dev", ".", "set_interface_altsetting", "(", "self", ".", "__claimed_interface", ",", "alternate", ")" ]
Start the parent that will simply run the child forever until stopped .
def start_parent ( ) : while True : args = [ sys . executable ] + sys . argv new_environ = environ . copy ( ) new_environ [ "_IN_CHILD" ] = 'yes' ret = subprocess . call ( args , env = new_environ ) if ret != settings . CODE_RELOAD_EXIT : return ret
10,668
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/cli/_live_reload.py#L123-L135
[ "def", "unindex_layers_with_issues", "(", "self", ",", "use_cache", "=", "False", ")", ":", "from", "hypermap", ".", "aggregator", ".", "models", "import", "Issue", ",", "Layer", ",", "Service", "from", "django", ".", "contrib", ".", "contenttypes", ".", "models", "import", "ContentType", "layer_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Layer", ")", "service_type", "=", "ContentType", ".", "objects", ".", "get_for_model", "(", "Service", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "layer_type", ".", "id", ")", ":", "unindex_layer", "(", "issue", ".", "content_object", ".", "id", ",", "use_cache", ")", "for", "issue", "in", "Issue", ".", "objects", ".", "filter", "(", "content_type__pk", "=", "service_type", ".", "id", ")", ":", "for", "layer", "in", "issue", ".", "content_object", ".", "layer_set", ".", "all", "(", ")", ":", "unindex_layer", "(", "layer", ".", "id", ",", "use_cache", ")" ]
Try to read a value named key from the GET parameters .
def get_from_params ( request , key ) : data = getattr ( request , 'json' , None ) or request . values value = data . get ( key ) return to_native ( value )
10,669
https://github.com/jpscaletti/authcode/blob/91529b6d0caec07d1452758d937e1e0745826139/authcode/wsgi/werkzeug.py#L55-L60
[ "def", "PrependUOffsetTRelative", "(", "self", ",", "off", ")", ":", "# Ensure alignment is already done:", "self", ".", "Prep", "(", "N", ".", "UOffsetTFlags", ".", "bytewidth", ",", "0", ")", "if", "not", "(", "off", "<=", "self", ".", "Offset", "(", ")", ")", ":", "msg", "=", "\"flatbuffers: Offset arithmetic error.\"", "raise", "OffsetArithmeticError", "(", "msg", ")", "off2", "=", "self", ".", "Offset", "(", ")", "-", "off", "+", "N", ".", "UOffsetTFlags", ".", "bytewidth", "self", ".", "PlaceUOffsetT", "(", "off2", ")" ]
Try to read a value named key from the headers .
def get_from_headers ( request , key ) : value = request . headers . get ( key ) return to_native ( value )
10,670
https://github.com/jpscaletti/authcode/blob/91529b6d0caec07d1452758d937e1e0745826139/authcode/wsgi/werkzeug.py#L63-L67
[ "def", "cartesian_square_centred_on_point", "(", "self", ",", "point", ",", "distance", ",", "*", "*", "kwargs", ")", ":", "point_surface", "=", "Point", "(", "point", ".", "longitude", ",", "point", ".", "latitude", ",", "0.", ")", "# As distance is", "north_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "0.", ")", "east_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "90.", ")", "south_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "180.", ")", "west_point", "=", "point_surface", ".", "point_at", "(", "distance", ",", "0.", ",", "270.", ")", "is_long", "=", "np", ".", "logical_and", "(", "self", ".", "catalogue", ".", "data", "[", "'longitude'", "]", ">=", "west_point", ".", "longitude", ",", "self", ".", "catalogue", ".", "data", "[", "'longitude'", "]", "<", "east_point", ".", "longitude", ")", "is_surface", "=", "np", ".", "logical_and", "(", "is_long", ",", "self", ".", "catalogue", ".", "data", "[", "'latitude'", "]", ">=", "south_point", ".", "latitude", ",", "self", ".", "catalogue", ".", "data", "[", "'latitude'", "]", "<", "north_point", ".", "latitude", ")", "upper_depth", ",", "lower_depth", "=", "_check_depth_limits", "(", "kwargs", ")", "is_valid", "=", "np", ".", "logical_and", "(", "is_surface", ",", "self", ".", "catalogue", ".", "data", "[", "'depth'", "]", ">=", "upper_depth", ",", "self", ".", "catalogue", ".", "data", "[", "'depth'", "]", "<", "lower_depth", ")", "return", "self", ".", "select_catalogue", "(", "is_valid", ")" ]
Handle here the asynchronous part of the init .
async def async_init ( self ) : self . pool = await aioredis . create_pool ( ( self . host , self . port ) , db = self . db_id , minsize = self . min_pool_size , maxsize = self . max_pool_size , loop = asyncio . get_event_loop ( ) , )
10,671
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/storage/redis.py#L41-L52
[ "def", "_get_port_speed_price_id", "(", "items", ",", "port_speed", ",", "no_public", ",", "location", ")", ":", "for", "item", "in", "items", ":", "if", "utils", ".", "lookup", "(", "item", ",", "'itemCategory'", ",", "'categoryCode'", ")", "!=", "'port_speed'", ":", "continue", "# Check for correct capacity and if the item matches private only", "if", "any", "(", "[", "int", "(", "utils", ".", "lookup", "(", "item", ",", "'capacity'", ")", ")", "!=", "port_speed", ",", "_is_private_port_speed_item", "(", "item", ")", "!=", "no_public", ",", "not", "_is_bonded", "(", "item", ")", "]", ")", ":", "continue", "for", "price", "in", "item", "[", "'prices'", "]", ":", "if", "not", "_matches_location", "(", "price", ",", "location", ")", ":", "continue", "return", "price", "[", "'id'", "]", "raise", "SoftLayer", ".", "SoftLayerError", "(", "\"Could not find valid price for port speed: '%s'\"", "%", "port_speed", ")" ]
Attempts to serialize values from a dictionary skipping private attrs .
def serialize ( d ) : ret = { } for k , v in d . items ( ) : if not k . startswith ( '_' ) : ret [ k ] = str ( d [ k ] ) #ret['__class__'] = obj.__class__.__name__ return ret
10,672
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/util/util.py#L31-L44
[ "def", "get_max_events_in_both_arrays", "(", "events_one", ",", "events_two", ")", ":", "events_one", "=", "np", ".", "ascontiguousarray", "(", "events_one", ")", "# change memory alignement for c++ library", "events_two", "=", "np", ".", "ascontiguousarray", "(", "events_two", ")", "# change memory alignement for c++ library", "event_result", "=", "np", ".", "empty", "(", "shape", "=", "(", "events_one", ".", "shape", "[", "0", "]", "+", "events_two", ".", "shape", "[", "0", "]", ",", ")", ",", "dtype", "=", "events_one", ".", "dtype", ")", "count", "=", "analysis_functions", ".", "get_max_events_in_both_arrays", "(", "events_one", ",", "events_two", ",", "event_result", ")", "return", "event_result", "[", ":", "count", "]" ]
Initialize Git user config file .
def user_config ( * * kwargs ) : for kw in kwargs : git ( 'config --global user.%s "%s"' % ( kw , kwargs . get ( kw ) ) ) . wait ( )
10,673
https://github.com/rsgalloway/grit/blob/e6434ad8a1f4ac5d0903ebad630c81f8a5164d78/grit/util/util.py#L51-L58
[ "def", "remove_async_sns_topic", "(", "self", ",", "lambda_name", ")", ":", "topic_name", "=", "get_topic_name", "(", "lambda_name", ")", "removed_arns", "=", "[", "]", "for", "sub", "in", "self", ".", "sns_client", ".", "list_subscriptions", "(", ")", "[", "'Subscriptions'", "]", ":", "if", "topic_name", "in", "sub", "[", "'TopicArn'", "]", ":", "self", ".", "sns_client", ".", "delete_topic", "(", "TopicArn", "=", "sub", "[", "'TopicArn'", "]", ")", "removed_arns", ".", "append", "(", "sub", "[", "'TopicArn'", "]", ")", "return", "removed_arns" ]
Make a JWT header
def _make_header ( self , token_type = None , signing_algorithm = None ) : if not token_type : token_type = self . token_type if not signing_algorithm : signing_algorithm = self . signing_algorithm header = { 'typ' : token_type , 'alg' : signing_algorithm } return header
10,674
https://github.com/blockstack-packages/jsontokens-py/blob/1a4e71ed63456e8381b7d3fd566ce38e6ebfa7d3/jsontokens/token_signer.py#L38-L49
[ "def", "_restart_session", "(", "self", ",", "session", ")", ":", "# remove old session key, if socket is None, that means the", "# session was closed by user and there is no need to restart.", "if", "session", ".", "socket", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"Attempting restart session for Monitor Id %s.\"", "%", "session", ".", "monitor_id", ")", "del", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "session", ".", "stop", "(", ")", "session", ".", "start", "(", ")", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "=", "session" ]
Sign a serialized header and payload . Return the urlsafe - base64 - encoded signature .
def _make_signature ( self , header_b64 , payload_b64 , signing_key ) : token_segments = [ header_b64 , payload_b64 ] signing_input = b'.' . join ( token_segments ) signer = self . _get_signer ( signing_key ) signer . update ( signing_input ) signature = signer . finalize ( ) raw_signature = der_to_raw_signature ( signature , signing_key . curve ) return base64url_encode ( raw_signature )
10,675
https://github.com/blockstack-packages/jsontokens-py/blob/1a4e71ed63456e8381b7d3fd566ce38e6ebfa7d3/jsontokens/token_signer.py#L52-L65
[ "def", "_getNearestMappingIndexList", "(", "fromValList", ",", "toValList", ")", ":", "indexList", "=", "[", "]", "for", "fromTimestamp", "in", "fromValList", ":", "smallestDiff", "=", "_getSmallestDifference", "(", "toValList", ",", "fromTimestamp", ")", "i", "=", "toValList", ".", "index", "(", "smallestDiff", ")", "indexList", ".", "append", "(", "i", ")", "return", "indexList" ]
Make a multi - signature JWT . Returns a JSON - structured JWT .
def _sign_multi ( self , payload , signing_keys ) : if not isinstance ( payload , Mapping ) : raise TypeError ( 'Expecting a mapping object, as only ' 'JSON objects can be used as payloads.' ) if not isinstance ( signing_keys , list ) : raise TypeError ( "Expecting a list of keys" ) headers = [ ] signatures = [ ] payload_b64 = base64url_encode ( json_encode ( payload ) ) for sk in signing_keys : signing_key = load_signing_key ( sk , self . crypto_backend ) header = self . _make_header ( ) header_b64 = base64url_encode ( json_encode ( header ) ) signature_b64 = self . _make_signature ( header_b64 , payload_b64 , signing_key ) headers . append ( header_b64 ) signatures . append ( signature_b64 ) jwt = { "header" : headers , "payload" : payload_b64 , "signature" : signatures } return jwt
10,676
https://github.com/blockstack-packages/jsontokens-py/blob/1a4e71ed63456e8381b7d3fd566ce38e6ebfa7d3/jsontokens/token_signer.py#L93-L127
[ "async", "def", "add_unknown_id", "(", "self", ",", "unknown_id", ",", "timeout", "=", "OTGW_DEFAULT_TIMEOUT", ")", ":", "cmd", "=", "OTGW_CMD_UNKNOWN_ID", "unknown_id", "=", "int", "(", "unknown_id", ")", "if", "unknown_id", "<", "1", "or", "unknown_id", ">", "255", ":", "return", "None", "ret", "=", "await", "self", ".", "_wait_for_cmd", "(", "cmd", ",", "unknown_id", ",", "timeout", ")", "if", "ret", "is", "not", "None", ":", "return", "int", "(", "ret", ")" ]
Create a JWT with one or more keys . Returns a compact - form serialized JWT if there is only one key to sign with Returns a JSON - structured serialized JWT if there are multiple keys to sign with
def sign ( self , payload , signing_key_or_keys ) : if isinstance ( signing_key_or_keys , list ) : return self . _sign_multi ( payload , signing_key_or_keys ) else : return self . _sign_single ( payload , signing_key_or_keys )
10,677
https://github.com/blockstack-packages/jsontokens-py/blob/1a4e71ed63456e8381b7d3fd566ce38e6ebfa7d3/jsontokens/token_signer.py#L130-L140
[ "def", "future_set_exception_unless_cancelled", "(", "future", ":", "\"Union[futures.Future[_T], Future[_T]]\"", ",", "exc", ":", "BaseException", ")", "->", "None", ":", "if", "not", "future", ".", "cancelled", "(", ")", ":", "future", ".", "set_exception", "(", "exc", ")", "else", ":", "app_log", ".", "error", "(", "\"Exception after Future was cancelled\"", ",", "exc_info", "=", "exc", ")" ]
Recursively parse requirements from nested pip files .
def parse_reqs ( req_path = './requirements/requirements.txt' ) : install_requires = [ ] with codecs . open ( req_path , 'r' ) as handle : # remove comments and empty lines lines = ( line . strip ( ) for line in handle if line . strip ( ) and not line . startswith ( '#' ) ) for line in lines : # check for nested requirements files if line . startswith ( '-r' ) : # recursively call this function install_requires += parse_reqs ( req_path = line [ 3 : ] ) else : # add the line as a new requirement install_requires . append ( line ) return install_requires
10,678
https://github.com/pyslackers/sir-bot-a-lot/blob/22dfdd6a14d61dbe29423fd131b7a23e618b68d7/setup.py#L25-L42
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Report current exception to Sentry .
def report ( self , request : 'Request' = None , state : Text = None ) : self . _make_context ( request , state ) self . client . captureException ( ) self . _clear_context ( )
10,679
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/reporter/_raven.py#L68-L74
[ "def", "compute_score", "(", "subtitle", ",", "video", ",", "hearing_impaired", "=", "None", ")", ":", "logger", ".", "info", "(", "'Computing score of %r for video %r with %r'", ",", "subtitle", ",", "video", ",", "dict", "(", "hearing_impaired", "=", "hearing_impaired", ")", ")", "# get the scores dict", "scores", "=", "get_scores", "(", "video", ")", "logger", ".", "debug", "(", "'Using scores %r'", ",", "scores", ")", "# get the matches", "matches", "=", "subtitle", ".", "get_matches", "(", "video", ")", "logger", ".", "debug", "(", "'Found matches %r'", ",", "matches", ")", "# on hash match, discard everything else", "if", "'hash'", "in", "matches", ":", "logger", ".", "debug", "(", "'Keeping only hash match'", ")", "matches", "&=", "{", "'hash'", "}", "# handle equivalent matches", "if", "isinstance", "(", "video", ",", "Episode", ")", ":", "if", "'title'", "in", "matches", ":", "logger", ".", "debug", "(", "'Adding title match equivalent'", ")", "matches", ".", "add", "(", "'episode'", ")", "if", "'series_imdb_id'", "in", "matches", ":", "logger", ".", "debug", "(", "'Adding series_imdb_id match equivalent'", ")", "matches", "|=", "{", "'series'", ",", "'year'", "}", "if", "'imdb_id'", "in", "matches", ":", "logger", ".", "debug", "(", "'Adding imdb_id match equivalents'", ")", "matches", "|=", "{", "'series'", ",", "'year'", ",", "'season'", ",", "'episode'", "}", "if", "'tvdb_id'", "in", "matches", ":", "logger", ".", "debug", "(", "'Adding tvdb_id match equivalents'", ")", "matches", "|=", "{", "'series'", ",", "'year'", ",", "'season'", ",", "'episode'", "}", "if", "'series_tvdb_id'", "in", "matches", ":", "logger", ".", "debug", "(", "'Adding series_tvdb_id match equivalents'", ")", "matches", "|=", "{", "'series'", ",", "'year'", "}", "elif", "isinstance", "(", "video", ",", "Movie", ")", ":", "if", "'imdb_id'", "in", "matches", ":", "logger", ".", "debug", "(", "'Adding imdb_id match equivalents'", ")", "matches", "|=", "{", "'title'", ",", 
"'year'", "}", "# handle hearing impaired", "if", "hearing_impaired", "is", "not", "None", "and", "subtitle", ".", "hearing_impaired", "==", "hearing_impaired", ":", "logger", ".", "debug", "(", "'Matched hearing_impaired'", ")", "matches", ".", "add", "(", "'hearing_impaired'", ")", "# compute the score", "score", "=", "sum", "(", "(", "scores", ".", "get", "(", "match", ",", "0", ")", "for", "match", "in", "matches", ")", ")", "logger", ".", "info", "(", "'Computed score %r with final matches %r'", ",", "score", ",", "matches", ")", "# ensure score is within valid bounds", "assert", "0", "<=", "score", "<=", "scores", "[", "'hash'", "]", "+", "scores", "[", "'hearing_impaired'", "]", "return", "score" ]
Validates the name and creates variations
def vary_name ( name : Text ) : snake = re . match ( r'^[a-z][a-z0-9]*(?:_[a-z0-9]+)*$' , name ) if not snake : fail ( 'The project name is not a valid snake-case Python variable name' ) camel = [ x [ 0 ] . upper ( ) + x [ 1 : ] for x in name . split ( '_' ) ] return { 'project_name_snake' : name , 'project_name_camel' : '' . join ( camel ) , 'project_name_readable' : ' ' . join ( camel ) , }
10,680
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/start_project/_base.py#L30-L46
[ "def", "setAsApplication", "(", "myappid", ")", ":", "if", "os", ".", "name", "==", "'nt'", ":", "import", "ctypes", "ctypes", ".", "windll", ".", "shell32", ".", "SetCurrentProcessExplicitAppUserModelID", "(", "myappid", ")" ]
Generates a secure random string
def make_random_key ( ) -> Text : r = SystemRandom ( ) allowed = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_+/[]' return '' . join ( [ r . choice ( allowed ) for _ in range ( 0 , 50 ) ] )
10,681
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/start_project/_base.py#L49-L58
[ "def", "getOverlayTransformTrackedDeviceRelative", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayTransformTrackedDeviceRelative", "punTrackedDevice", "=", "TrackedDeviceIndex_t", "(", ")", "pmatTrackedDeviceToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "punTrackedDevice", ")", ",", "byref", "(", "pmatTrackedDeviceToOverlayTransform", ")", ")", "return", "result", ",", "punTrackedDevice", ",", "pmatTrackedDeviceToOverlayTransform" ]
Generates the target path for a directory
def make_dir_path ( project_dir , root , project_name ) : root = root . replace ( '__project_name_snake__' , project_name ) real_dir = path . realpath ( project_dir ) return path . join ( real_dir , root )
10,682
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/start_project/_base.py#L61-L68
[ "def", "_get_regional_term", "(", "self", ",", "C", ",", "imt", ",", "vs30", ",", "rrup", ")", ":", "f3", "=", "interpolate", ".", "interp1d", "(", "[", "150", ",", "250", ",", "350", ",", "450", ",", "600", ",", "850", ",", "1150", ",", "2000", "]", ",", "[", "C", "[", "'a36'", "]", ",", "C", "[", "'a37'", "]", ",", "C", "[", "'a38'", "]", ",", "C", "[", "'a39'", "]", ",", "C", "[", "'a40'", "]", ",", "C", "[", "'a41'", "]", ",", "C", "[", "'a42'", "]", ",", "C", "[", "'a42'", "]", "]", ",", "kind", "=", "'linear'", ")", "return", "f3", "(", "vs30", ")", "+", "C", "[", "'a29'", "]", "*", "rrup" ]
Generates the target path for a file
def make_file_path ( project_dir , project_name , root , name ) : return path . join ( make_dir_path ( project_dir , root , project_name ) , name )
10,683
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/start_project/_base.py#L71-L76
[ "def", "basis", "(", "start", ",", "stop", "=", "None", ",", "dim", "=", "1", ",", "sort", "=", "\"G\"", ",", "cross_truncation", "=", "1.", ")", ":", "if", "stop", "is", "None", ":", "start", ",", "stop", "=", "numpy", ".", "array", "(", "0", ")", ",", "start", "start", "=", "numpy", ".", "array", "(", "start", ",", "dtype", "=", "int", ")", "stop", "=", "numpy", ".", "array", "(", "stop", ",", "dtype", "=", "int", ")", "dim", "=", "max", "(", "start", ".", "size", ",", "stop", ".", "size", ",", "dim", ")", "indices", "=", "numpy", ".", "array", "(", "chaospy", ".", "bertran", ".", "bindex", "(", "numpy", ".", "min", "(", "start", ")", ",", "2", "*", "numpy", ".", "max", "(", "stop", ")", ",", "dim", ",", "sort", ",", "cross_truncation", ")", ")", "if", "start", ".", "size", "==", "1", ":", "bellow", "=", "numpy", ".", "sum", "(", "indices", ",", "-", "1", ")", ">=", "start", "else", ":", "start", "=", "numpy", ".", "ones", "(", "dim", ",", "dtype", "=", "int", ")", "*", "start", "bellow", "=", "numpy", ".", "all", "(", "indices", "-", "start", ">=", "0", ",", "-", "1", ")", "if", "stop", ".", "size", "==", "1", ":", "above", "=", "numpy", ".", "sum", "(", "indices", ",", "-", "1", ")", "<=", "stop", ".", "item", "(", ")", "else", ":", "stop", "=", "numpy", ".", "ones", "(", "dim", ",", "dtype", "=", "int", ")", "*", "stop", "above", "=", "numpy", ".", "all", "(", "stop", "-", "indices", ">=", "0", ",", "-", "1", ")", "pool", "=", "list", "(", "indices", "[", "above", "*", "bellow", "]", ")", "arg", "=", "numpy", ".", "zeros", "(", "len", "(", "pool", ")", ",", "dtype", "=", "int", ")", "arg", "[", "0", "]", "=", "1", "poly", "=", "{", "}", "for", "idx", "in", "pool", ":", "idx", "=", "tuple", "(", "idx", ")", "poly", "[", "idx", "]", "=", "arg", "arg", "=", "numpy", ".", "roll", "(", "arg", ",", "1", ")", "x", "=", "numpy", ".", "zeros", "(", "len", "(", "pool", ")", ",", "dtype", "=", "int", ")", "x", "[", "0", "]", "=", "1", "A", "=", "{", "}", "for", "I", 
"in", "pool", ":", "I", "=", "tuple", "(", "I", ")", "A", "[", "I", "]", "=", "x", "x", "=", "numpy", ".", "roll", "(", "x", ",", "1", ")", "return", "Poly", "(", "A", ",", "dim", ")" ]
Generates the variables to replace in files
def generate_vars ( project_name , project_dir ) : out = vary_name ( project_name ) out [ 'random_key' ] = make_random_key ( ) out [ 'settings_file' ] = make_file_path ( project_dir , project_name , path . join ( 'src' , project_name ) , 'settings.py' , ) return out
10,684
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/start_project/_base.py#L79-L93
[ "def", "catalogFactory", "(", "name", ",", "*", "*", "kwargs", ")", ":", "fn", "=", "lambda", "member", ":", "inspect", ".", "isclass", "(", "member", ")", "and", "member", ".", "__module__", "==", "__name__", "catalogs", "=", "odict", "(", "inspect", ".", "getmembers", "(", "sys", ".", "modules", "[", "__name__", "]", ",", "fn", ")", ")", "if", "name", "not", "in", "list", "(", "catalogs", ".", "keys", "(", ")", ")", ":", "msg", "=", "\"%s not found in catalogs:\\n %s\"", "%", "(", "name", ",", "list", "(", "kernels", ".", "keys", "(", ")", ")", ")", "logger", ".", "error", "(", "msg", ")", "msg", "=", "\"Unrecognized catalog: %s\"", "%", "name", "raise", "Exception", "(", "msg", ")", "return", "catalogs", "[", "name", "]", "(", "*", "*", "kwargs", ")" ]
Read all the template s files
def get_files ( ) : files_root = path . join ( path . dirname ( __file__ ) , 'files' ) for root , dirs , files in walk ( files_root ) : rel_root = path . relpath ( root , files_root ) for file_name in files : try : f = open ( path . join ( root , file_name ) , 'r' , encoding = 'utf-8' ) with f : yield rel_root , file_name , f . read ( ) , True except UnicodeError : f = open ( path . join ( root , file_name ) , 'rb' ) with f : yield rel_root , file_name , f . read ( ) , False
10,685
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/start_project/_base.py#L96-L114
[ "def", "getTotalAssociations", "(", "self", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Wifi", ".", "getServiceType", "(", "\"getTotalAssociations\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "results", "=", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"GetTotalAssociations\"", ",", "timeout", "=", "timeout", ")", "return", "int", "(", "results", "[", "\"NewTotalAssociations\"", "]", ")" ]
Checks that the target path is not empty
def check_target ( target_path ) : if not path . exists ( target_path ) : return with scandir ( target_path ) as d : for entry in d : if not entry . name . startswith ( '.' ) : fail ( f'Target directory "{target_path}" is not empty' )
10,686
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/start_project/_base.py#L117-L128
[ "def", "create_event_subscription", "(", "self", ",", "instance", ",", "on_data", ",", "timeout", "=", "60", ")", ":", "manager", "=", "WebSocketSubscriptionManager", "(", "self", ",", "resource", "=", "'events'", ")", "# Represent subscription as a future", "subscription", "=", "WebSocketSubscriptionFuture", "(", "manager", ")", "wrapped_callback", "=", "functools", ".", "partial", "(", "_wrap_callback_parse_event", ",", "on_data", ")", "manager", ".", "open", "(", "wrapped_callback", ",", "instance", ")", "# Wait until a reply or exception is received", "subscription", ".", "reply", "(", "timeout", "=", "timeout", ")", "return", "subscription" ]
Replaces variables inside the content .
def replace_content ( content , project_vars ) : for k , v in project_vars . items ( ) : content = content . replace ( f'__{k}__' , v ) return content
10,687
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/start_project/_base.py#L131-L139
[ "def", "mount_rate_limit_adapters", "(", "cls", ",", "session", "=", "None", ",", "rls_config", "=", "None", ",", "*", "*", "kwargs", ")", ":", "session", "=", "session", "or", "HTTP_SESSION", "if", "rls_config", "is", "None", ":", "rls_config", "=", "RateLimiter", ".", "get_configs", "(", ")", "for", "name", ",", "rl_conf", "in", "rls_config", ".", "items", "(", ")", ":", "urls", "=", "rl_conf", ".", "get", "(", "'urls'", ",", "[", "]", ")", "if", "not", "urls", ":", "continue", "rl_adapter", "=", "RLRequestAdapter", "(", "name", ",", "config", "=", "rls_config", ",", "*", "*", "kwargs", ")", "for", "url", "in", "urls", ":", "session", ".", "mount", "(", "url", ",", "rl_adapter", ")" ]
Copies files from the template into their target location . Unicode files get their variables replaced here and files with a shebang are set to be executable .
def copy_files ( project_vars , project_dir , files ) : for root , name , content , is_unicode in files : project_name = project_vars [ 'project_name_snake' ] if is_unicode : content = replace_content ( content , project_vars ) file_path = make_file_path ( project_dir , project_name , root , name ) makedirs ( make_dir_path ( project_dir , root , project_name ) , exist_ok = True ) if is_unicode : with open ( file_path , 'w' ) as f : f . write ( content ) if content . startswith ( '#!' ) : chmod ( file_path , 0o755 ) else : with open ( file_path , 'wb' ) as f : f . write ( content )
10,688
https://github.com/BernardFW/bernard/blob/9c55703e5ffe5717c9fa39793df59dbfa5b4c5ab/src/bernard/misc/start_project/_base.py#L142-L166
[ "def", "_create_comparison_method", "(", "cls", ",", "op", ")", ":", "def", "wrapper", "(", "self", ",", "other", ")", ":", "if", "isinstance", "(", "other", ",", "ABCSeries", ")", ":", "# the arrays defer to Series for comparison ops but the indexes", "# don't, so we have to unwrap here.", "other", "=", "other", ".", "_values", "result", "=", "op", "(", "self", ".", "_data", ",", "maybe_unwrap_index", "(", "other", ")", ")", "return", "result", "wrapper", ".", "__doc__", "=", "op", ".", "__doc__", "wrapper", ".", "__name__", "=", "'__{}__'", ".", "format", "(", "op", ".", "__name__", ")", "return", "wrapper" ]
Connect to the chatroom s server sets up handlers invites members as needed .
def connect ( self ) : for m in self . params [ 'MEMBERS' ] : m [ 'ONLINE' ] = 0 m . setdefault ( 'STATUS' , 'INVITED' ) self . client = xmpp . Client ( self . jid . getDomain ( ) , debug = [ ] ) conn = self . client . connect ( server = self . params [ 'SERVER' ] ) if not conn : raise Exception ( "could not connect to server" ) auth = self . client . auth ( self . jid . getNode ( ) , self . params [ 'PASSWORD' ] ) if not auth : raise Exception ( "could not authenticate as chat server" ) #self.client.RegisterDisconnectHandler(self.on_disconnect) self . client . RegisterHandler ( 'message' , self . on_message ) self . client . RegisterHandler ( 'presence' , self . on_presence ) self . client . sendInitPresence ( requestRoster = 0 ) roster = self . client . getRoster ( ) for m in self . params [ 'MEMBERS' ] : self . invite_user ( m , roster = roster )
10,689
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L31-L53
[ "def", "values", "(", "self", ")", ":", "self", ".", "expired", "(", ")", "values", "=", "[", "]", "for", "key", "in", "self", ".", "_dict", ".", "keys", "(", ")", ":", "try", ":", "value", "=", "self", ".", "_dict", "[", "key", "]", ".", "get", "(", ")", "values", ".", "append", "(", "value", ")", "except", ":", "continue", "return", "values" ]
Get a chatroom member by JID
def get_member ( self , jid , default = None ) : member = filter ( lambda m : m [ 'JID' ] == jid , self . params [ 'MEMBERS' ] ) if len ( member ) == 1 : return member [ 0 ] elif len ( member ) == 0 : return default else : raise Exception ( 'Multple members have the same JID of [%s]' % ( jid , ) )
10,690
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L55-L63
[ "def", "fetcher", "(", "date", "=", "datetime", ".", "today", "(", ")", ",", "url_pattern", "=", "URL_PATTERN", ")", ":", "api_url", "=", "url_pattern", "%", "date", ".", "strftime", "(", "'%Y-%m-%d'", ")", "headers", "=", "{", "'Referer'", ":", "'http://n.pl/program-tv'", "}", "raw_result", "=", "requests", ".", "get", "(", "api_url", ",", "headers", "=", "headers", ")", ".", "json", "(", ")", "return", "raw_result" ]
Check if a user is a member of the chatroom
def is_member ( self , m ) : if not m : return False elif isinstance ( m , basestring ) : jid = m else : jid = m [ 'JID' ] is_member = len ( filter ( lambda m : m [ 'JID' ] == jid and m . get ( 'STATUS' ) in ( 'ACTIVE' , 'INVITED' ) , self . params [ 'MEMBERS' ] ) ) > 0 return is_member
10,691
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L65-L76
[ "def", "_get_available_ports", "(", ")", ":", "if", "platform", ".", "system", "(", ")", "==", "'Darwin'", ":", "return", "glob", ".", "glob", "(", "'/dev/tty.usb*'", ")", "elif", "platform", ".", "system", "(", ")", "==", "'Linux'", ":", "return", "glob", ".", "glob", "(", "'/dev/ttyACM*'", ")", "+", "glob", ".", "glob", "(", "'/dev/ttyUSB*'", ")", "+", "glob", ".", "glob", "(", "'/dev/ttyAMA*'", ")", "elif", "sys", ".", "platform", ".", "lower", "(", ")", "==", "'cygwin'", ":", "return", "glob", ".", "glob", "(", "'/dev/com*'", ")", "elif", "platform", ".", "system", "(", ")", "==", "'Windows'", ":", "import", "_winreg", "import", "itertools", "ports", "=", "[", "]", "path", "=", "'HARDWARE\\\\DEVICEMAP\\\\SERIALCOMM'", "key", "=", "_winreg", ".", "OpenKey", "(", "_winreg", ".", "HKEY_LOCAL_MACHINE", ",", "path", ")", "for", "i", "in", "itertools", ".", "count", "(", ")", ":", "try", ":", "ports", ".", "append", "(", "str", "(", "_winreg", ".", "EnumValue", "(", "key", ",", "i", ")", "[", "1", "]", ")", ")", "except", "WindowsError", ":", "return", "ports", "else", ":", "raise", "EnvironmentError", "(", "'{} is an unsupported platform, cannot find serial ports !'", ".", "format", "(", "platform", ".", "system", "(", ")", ")", ")", "return", "[", "]" ]
Invites a new member to the chatroom
def invite_user ( self , new_member , inviter = None , roster = None ) : roster = roster or self . client . getRoster ( ) jid = new_member [ 'JID' ] logger . info ( 'roster %s %s' % ( jid , roster . getSubscription ( jid ) ) ) if jid in roster . keys ( ) and roster . getSubscription ( jid ) in [ 'both' , 'to' ] : new_member [ 'STATUS' ] = 'ACTIVE' if inviter : self . send_message ( '%s is already a member' % ( jid , ) , inviter ) else : new_member [ 'STATUS' ] = 'INVITED' self . broadcast ( 'inviting %s to the room' % ( jid , ) ) #Add nickname according to http://xmpp.org/extensions/xep-0172.html subscribe_presence = xmpp . dispatcher . Presence ( to = jid , typ = 'subscribe' ) if 'NICK' in self . params : subscribe_presence . addChild ( name = 'nick' , namespace = xmpp . protocol . NS_NICK , payload = self . params [ 'NICK' ] ) self . client . send ( subscribe_presence ) if not self . is_member ( new_member ) : new_member . setdefault ( 'NICK' , jid . split ( '@' ) [ 0 ] ) self . params [ 'MEMBERS' ] . append ( new_member )
10,692
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L78-L100
[ "def", "list_sites", "(", ")", ":", "ret", "=", "dict", "(", ")", "ps_cmd", "=", "[", "'Get-ChildItem'", ",", "'-Path'", ",", "r\"'IIS:\\Sites'\"", ",", "'|'", ",", "'Select-Object applicationPool, applicationDefaults, Bindings, ID, Name, PhysicalPath, State'", "]", "keep_keys", "=", "(", "'certificateHash'", ",", "'certificateStoreName'", ",", "'protocol'", ",", "'sslFlags'", ")", "cmd_ret", "=", "_srvmgr", "(", "cmd", "=", "ps_cmd", ",", "return_json", "=", "True", ")", "try", ":", "items", "=", "salt", ".", "utils", ".", "json", ".", "loads", "(", "cmd_ret", "[", "'stdout'", "]", ",", "strict", "=", "False", ")", "except", "ValueError", ":", "raise", "CommandExecutionError", "(", "'Unable to parse return data as Json.'", ")", "for", "item", "in", "items", ":", "bindings", "=", "dict", "(", ")", "for", "binding", "in", "item", "[", "'bindings'", "]", "[", "'Collection'", "]", ":", "# Ignore bindings which do not have host names", "if", "binding", "[", "'protocol'", "]", "not", "in", "[", "'http'", ",", "'https'", "]", ":", "continue", "filtered_binding", "=", "dict", "(", ")", "for", "key", "in", "binding", ":", "if", "key", "in", "keep_keys", ":", "filtered_binding", ".", "update", "(", "{", "key", ".", "lower", "(", ")", ":", "binding", "[", "key", "]", "}", ")", "binding_info", "=", "binding", "[", "'bindingInformation'", "]", ".", "split", "(", "':'", ",", "2", ")", "ipaddress", ",", "port", ",", "hostheader", "=", "[", "element", ".", "strip", "(", ")", "for", "element", "in", "binding_info", "]", "filtered_binding", ".", "update", "(", "{", "'hostheader'", ":", "hostheader", ",", "'ipaddress'", ":", "ipaddress", ",", "'port'", ":", "port", "}", ")", "bindings", "[", "binding", "[", "'bindingInformation'", "]", "]", "=", "filtered_binding", "# ApplicationDefaults", "application_defaults", "=", "dict", "(", ")", "for", "attribute", "in", "item", "[", "'applicationDefaults'", "]", "[", "'Attributes'", "]", ":", "application_defaults", ".", "update", "(", 
"{", "attribute", "[", "'Name'", "]", ":", "attribute", "[", "'Value'", "]", "}", ")", "# ApplicationDefaults", "ret", "[", "item", "[", "'name'", "]", "]", "=", "{", "'apppool'", ":", "item", "[", "'applicationPool'", "]", ",", "'bindings'", ":", "bindings", ",", "'applicationDefaults'", ":", "application_defaults", ",", "'id'", ":", "item", "[", "'id'", "]", ",", "'state'", ":", "item", "[", "'state'", "]", ",", "'sourcepath'", ":", "item", "[", "'physicalPath'", "]", "}", "if", "not", "ret", ":", "log", ".", "warning", "(", "'No sites found in output: %s'", ",", "cmd_ret", "[", "'stdout'", "]", ")", "return", "ret" ]
Kicks a member from the chatroom . Kicked user will receive no more messages .
def kick_user ( self , jid ) : for member in filter ( lambda m : m [ 'JID' ] == jid , self . params [ 'MEMBERS' ] ) : member [ 'STATUS' ] = 'KICKED' self . send_message ( 'You have been kicked from %s' % ( self . name , ) , member ) self . client . sendPresence ( jid = member [ 'JID' ] , typ = 'unsubscribed' ) self . client . sendPresence ( jid = member [ 'JID' ] , typ = 'unsubscribe' ) self . broadcast ( 'kicking %s from the room' % ( jid , ) )
10,693
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L102-L110
[ "def", "parse_general_name", "(", "name", ")", ":", "name", "=", "force_text", "(", "name", ")", "typ", "=", "None", "match", "=", "GENERAL_NAME_RE", ".", "match", "(", "name", ")", "if", "match", "is", "not", "None", ":", "typ", ",", "name", "=", "match", ".", "groups", "(", ")", "typ", "=", "typ", ".", "lower", "(", ")", "if", "typ", "is", "None", ":", "if", "re", ".", "match", "(", "'[a-z0-9]{2,}://'", ",", "name", ")", ":", "# Looks like a URI", "try", ":", "return", "x509", ".", "UniformResourceIdentifier", "(", "name", ")", "except", "Exception", ":", "# pragma: no cover - this really accepts anything", "pass", "if", "'@'", "in", "name", ":", "# Looks like an Email address", "try", ":", "return", "x509", ".", "RFC822Name", "(", "validate_email", "(", "name", ")", ")", "except", "Exception", ":", "pass", "if", "name", ".", "strip", "(", ")", ".", "startswith", "(", "'/'", ")", ":", "# maybe it's a dirname?", "return", "x509", ".", "DirectoryName", "(", "x509_name", "(", "name", ")", ")", "# Try to parse this as IPAddress/Network", "try", ":", "return", "x509", ".", "IPAddress", "(", "ip_address", "(", "name", ")", ")", "except", "ValueError", ":", "pass", "try", ":", "return", "x509", ".", "IPAddress", "(", "ip_network", "(", "name", ")", ")", "except", "ValueError", ":", "pass", "# Try to encode as domain name. 
DNSName() does not validate the domain name, but this check will fail.", "if", "name", ".", "startswith", "(", "'*.'", ")", ":", "idna", ".", "encode", "(", "name", "[", "2", ":", "]", ")", "elif", "name", ".", "startswith", "(", "'.'", ")", ":", "idna", ".", "encode", "(", "name", "[", "1", ":", "]", ")", "else", ":", "idna", ".", "encode", "(", "name", ")", "# Almost anything passes as DNS name, so this is our default fallback", "return", "x509", ".", "DNSName", "(", "name", ")", "if", "typ", "==", "'uri'", ":", "return", "x509", ".", "UniformResourceIdentifier", "(", "name", ")", "elif", "typ", "==", "'email'", ":", "return", "x509", ".", "RFC822Name", "(", "validate_email", "(", "name", ")", ")", "elif", "typ", "==", "'ip'", ":", "try", ":", "return", "x509", ".", "IPAddress", "(", "ip_address", "(", "name", ")", ")", "except", "ValueError", ":", "pass", "try", ":", "return", "x509", ".", "IPAddress", "(", "ip_network", "(", "name", ")", ")", "except", "ValueError", ":", "pass", "raise", "ValueError", "(", "'Could not parse IP address.'", ")", "elif", "typ", "==", "'rid'", ":", "return", "x509", ".", "RegisteredID", "(", "x509", ".", "ObjectIdentifier", "(", "name", ")", ")", "elif", "typ", "==", "'othername'", ":", "regex", "=", "\"(.*);(.*):(.*)\"", "if", "re", ".", "match", "(", "regex", ",", "name", ")", "is", "not", "None", ":", "oid", ",", "asn_typ", ",", "val", "=", "re", ".", "match", "(", "regex", ",", "name", ")", ".", "groups", "(", ")", "oid", "=", "x509", ".", "ObjectIdentifier", "(", "oid", ")", "if", "asn_typ", "==", "'UTF8'", ":", "val", "=", "val", ".", "encode", "(", "'utf-8'", ")", "elif", "asn_typ", "==", "'OctetString'", ":", "val", "=", "bytes", "(", "bytearray", ".", "fromhex", "(", "val", ")", ")", "val", "=", "OctetString", "(", "val", ")", ".", "dump", "(", ")", "else", ":", "raise", "ValueError", "(", "'Unsupported ASN type in otherName: %s'", "%", "asn_typ", ")", "val", "=", "force_bytes", "(", "val", ")", "return", "x509", ".", 
"OtherName", "(", "oid", ",", "val", ")", "else", ":", "raise", "ValueError", "(", "'Incorrect otherName format: %s'", "%", "name", ")", "elif", "typ", "==", "'dirname'", ":", "return", "x509", ".", "DirectoryName", "(", "x509_name", "(", "name", ")", ")", "else", ":", "# Try to encode the domain name. DNSName() does not validate the domain name, but this", "# check will fail.", "if", "name", ".", "startswith", "(", "'*.'", ")", ":", "idna", ".", "encode", "(", "name", "[", "2", ":", "]", ")", "elif", "name", ".", "startswith", "(", "'.'", ")", ":", "idna", ".", "encode", "(", "name", "[", "1", ":", "]", ")", "else", ":", "idna", ".", "encode", "(", "name", ")", "return", "x509", ".", "DNSName", "(", "name", ")" ]
Send a message to a single member
def send_message ( self , body , to , quiet = False , html_body = None ) : if to . get ( 'MUTED' ) : to [ 'QUEUED_MESSAGES' ] . append ( body ) else : if not quiet : logger . info ( 'message on %s to %s: %s' % ( self . name , to [ 'JID' ] , body ) ) message = xmpp . protocol . Message ( to = to [ 'JID' ] , body = body , typ = 'chat' ) if html_body : html = xmpp . Node ( 'html' , { 'xmlns' : 'http://jabber.org/protocol/xhtml-im' } ) html . addChild ( node = xmpp . simplexml . XML2Node ( "<body xmlns='http://www.w3.org/1999/xhtml'>" + html_body . encode ( 'utf-8' ) + "</body>" ) ) message . addChild ( node = html ) self . client . send ( message )
10,694
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L112-L126
[ "def", "diag_ksl", "(", "A", ",", "y0", ",", "tau", ",", "verb", "=", "1", ",", "scheme", "=", "'symm'", ",", "space", "=", "8", ",", "rmax", "=", "2000", ")", ":", "y0", "=", "y0", ".", "round", "(", "1e-14", ")", "# This will fix ranks", "# to be no more than maximal reasonable.", "# Fortran part doesn't handle excessive ranks", "ry", "=", "y0", ".", "r", ".", "copy", "(", ")", "if", "scheme", "is", "'symm'", ":", "tp", "=", "2", "else", ":", "tp", "=", "1", "# Check for dtype", "y", "=", "tt", ".", "vector", "(", ")", "if", "np", ".", "iscomplex", "(", "A", ".", "core", ")", ".", "any", "(", ")", "or", "np", ".", "iscomplex", "(", "y0", ".", "core", ")", ".", "any", "(", ")", ":", "dyn_tt", ".", "dyn_diag_tt", ".", "ztt_diag_ksl", "(", "y0", ".", "d", ",", "A", ".", "n", ",", "A", ".", "r", ",", "A", ".", "core", "+", "0j", ",", "y0", ".", "core", "+", "0j", ",", "ry", ",", "tau", ",", "rmax", ",", "0", ",", "10", ",", "verb", ",", "tp", ",", "space", ")", "y", ".", "core", "=", "dyn_tt", ".", "dyn_diag_tt", ".", "zresult_core", ".", "copy", "(", ")", "else", ":", "A", ".", "core", "=", "np", ".", "real", "(", "A", ".", "core", ")", "y0", ".", "core", "=", "np", ".", "real", "(", "y0", ".", "core", ")", "dyn_tt", ".", "dyn_diag_tt", ".", "dtt_diag_ksl", "(", "y0", ".", "d", ",", "A", ".", "n", ",", "A", ".", "r", ",", "A", ".", "core", ",", "y0", ".", "core", ",", "ry", ",", "tau", ",", "rmax", ",", "0", ",", "10", ",", "verb", ",", "tp", ",", "space", ")", "y", ".", "core", "=", "dyn_tt", ".", "dyn_diag_tt", ".", "dresult_core", ".", "copy", "(", ")", "dyn_tt", ".", "dyn_diag_tt", ".", "deallocate_result", "(", ")", "y", ".", "d", "=", "y0", ".", "d", "y", ".", "n", "=", "A", ".", "n", ".", "copy", "(", ")", "y", ".", "r", "=", "ry", "y", ".", "get_ps", "(", ")", "return", "y" ]
Broadcast a message to users in the chatroom
def broadcast ( self , body , html_body = None , exclude = ( ) ) : logger . info ( 'broadcast on %s: %s' % ( self . name , body , ) ) for member in filter ( lambda m : m . get ( 'STATUS' ) == 'ACTIVE' and m not in exclude , self . params [ 'MEMBERS' ] ) : logger . debug ( member [ 'JID' ] ) self . send_message ( body , member , html_body = html_body , quiet = True )
10,695
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L128-L133
[ "def", "list_sites", "(", ")", ":", "ret", "=", "dict", "(", ")", "ps_cmd", "=", "[", "'Get-ChildItem'", ",", "'-Path'", ",", "r\"'IIS:\\Sites'\"", ",", "'|'", ",", "'Select-Object applicationPool, applicationDefaults, Bindings, ID, Name, PhysicalPath, State'", "]", "keep_keys", "=", "(", "'certificateHash'", ",", "'certificateStoreName'", ",", "'protocol'", ",", "'sslFlags'", ")", "cmd_ret", "=", "_srvmgr", "(", "cmd", "=", "ps_cmd", ",", "return_json", "=", "True", ")", "try", ":", "items", "=", "salt", ".", "utils", ".", "json", ".", "loads", "(", "cmd_ret", "[", "'stdout'", "]", ",", "strict", "=", "False", ")", "except", "ValueError", ":", "raise", "CommandExecutionError", "(", "'Unable to parse return data as Json.'", ")", "for", "item", "in", "items", ":", "bindings", "=", "dict", "(", ")", "for", "binding", "in", "item", "[", "'bindings'", "]", "[", "'Collection'", "]", ":", "# Ignore bindings which do not have host names", "if", "binding", "[", "'protocol'", "]", "not", "in", "[", "'http'", ",", "'https'", "]", ":", "continue", "filtered_binding", "=", "dict", "(", ")", "for", "key", "in", "binding", ":", "if", "key", "in", "keep_keys", ":", "filtered_binding", ".", "update", "(", "{", "key", ".", "lower", "(", ")", ":", "binding", "[", "key", "]", "}", ")", "binding_info", "=", "binding", "[", "'bindingInformation'", "]", ".", "split", "(", "':'", ",", "2", ")", "ipaddress", ",", "port", ",", "hostheader", "=", "[", "element", ".", "strip", "(", ")", "for", "element", "in", "binding_info", "]", "filtered_binding", ".", "update", "(", "{", "'hostheader'", ":", "hostheader", ",", "'ipaddress'", ":", "ipaddress", ",", "'port'", ":", "port", "}", ")", "bindings", "[", "binding", "[", "'bindingInformation'", "]", "]", "=", "filtered_binding", "# ApplicationDefaults", "application_defaults", "=", "dict", "(", ")", "for", "attribute", "in", "item", "[", "'applicationDefaults'", "]", "[", "'Attributes'", "]", ":", "application_defaults", ".", "update", "(", 
"{", "attribute", "[", "'Name'", "]", ":", "attribute", "[", "'Value'", "]", "}", ")", "# ApplicationDefaults", "ret", "[", "item", "[", "'name'", "]", "]", "=", "{", "'apppool'", ":", "item", "[", "'applicationPool'", "]", ",", "'bindings'", ":", "bindings", ",", "'applicationDefaults'", ":", "application_defaults", ",", "'id'", ":", "item", "[", "'id'", "]", ",", "'state'", ":", "item", "[", "'state'", "]", ",", "'sourcepath'", ":", "item", "[", "'physicalPath'", "]", "}", "if", "not", "ret", ":", "log", ".", "warning", "(", "'No sites found in output: %s'", ",", "cmd_ret", "[", "'stdout'", "]", ")", "return", "ret" ]
Invite members to the chatroom on a user s behalf
def do_invite ( self , sender , body , args ) : for invitee in args : new_member = { 'JID' : invitee } self . invite_user ( new_member , inviter = sender )
10,696
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L141-L145
[ "def", "Run", "(", "self", ",", "args", ")", ":", "with", "vfs", ".", "VFSOpen", "(", "args", ".", "pathspec", ",", "progress_callback", "=", "self", ".", "Progress", ")", "as", "file_obj", ":", "fingerprinter", "=", "Fingerprinter", "(", "self", ".", "Progress", ",", "file_obj", ")", "response", "=", "rdf_client_action", ".", "FingerprintResponse", "(", ")", "response", ".", "pathspec", "=", "file_obj", ".", "pathspec", "if", "args", ".", "tuples", ":", "tuples", "=", "args", ".", "tuples", "else", ":", "# There are none selected -- we will cover everything", "tuples", "=", "list", "(", ")", "for", "k", "in", "self", ".", "_fingerprint_types", ":", "tuples", ".", "append", "(", "rdf_client_action", ".", "FingerprintTuple", "(", "fp_type", "=", "k", ")", ")", "for", "finger", "in", "tuples", ":", "hashers", "=", "[", "self", ".", "_hash_types", "[", "h", "]", "for", "h", "in", "finger", ".", "hashers", "]", "or", "None", "if", "finger", ".", "fp_type", "in", "self", ".", "_fingerprint_types", ":", "invoke", "=", "self", ".", "_fingerprint_types", "[", "finger", ".", "fp_type", "]", "res", "=", "invoke", "(", "fingerprinter", ",", "hashers", ")", "if", "res", ":", "response", ".", "matching_types", ".", "append", "(", "finger", ".", "fp_type", ")", "else", ":", "raise", "RuntimeError", "(", "\"Encountered unknown fingerprint type. %s\"", "%", "finger", ".", "fp_type", ")", "# Structure of the results is a list of dicts, each containing the", "# name of the hashing method, hashes for enabled hash algorithms,", "# and auxilliary data where present (e.g. 
signature blobs).", "# Also see Fingerprint:HashIt()", "response", ".", "results", "=", "fingerprinter", ".", "HashIt", "(", ")", "# We now return data in a more structured form.", "for", "result", "in", "response", ".", "results", ":", "if", "result", ".", "GetItem", "(", "\"name\"", ")", "==", "\"generic\"", ":", "for", "hash_type", "in", "[", "\"md5\"", ",", "\"sha1\"", ",", "\"sha256\"", "]", ":", "value", "=", "result", ".", "GetItem", "(", "hash_type", ")", "if", "value", "is", "not", "None", ":", "setattr", "(", "response", ".", "hash", ",", "hash_type", ",", "value", ")", "if", "result", "[", "\"name\"", "]", "==", "\"pecoff\"", ":", "for", "hash_type", "in", "[", "\"md5\"", ",", "\"sha1\"", ",", "\"sha256\"", "]", ":", "value", "=", "result", ".", "GetItem", "(", "hash_type", ")", "if", "value", ":", "setattr", "(", "response", ".", "hash", ",", "\"pecoff_\"", "+", "hash_type", ",", "value", ")", "signed_data", "=", "result", ".", "GetItem", "(", "\"SignedData\"", ",", "[", "]", ")", "for", "data", "in", "signed_data", ":", "response", ".", "hash", ".", "signed_data", ".", "Append", "(", "revision", "=", "data", "[", "0", "]", ",", "cert_type", "=", "data", "[", "1", "]", ",", "certificate", "=", "data", "[", "2", "]", ")", "self", ".", "SendReply", "(", "response", ")" ]
Kick a member from the chatroom . Must be Admin to kick users
def do_kick ( self , sender , body , args ) : if sender . get ( 'ADMIN' ) != True : return for user in args : self . kick_user ( user )
10,697
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L147-L151
[ "def", "_OpenFile", "(", "self", ",", "path", ")", ":", "if", "not", "self", ".", "_registry_file_reader", ":", "return", "None", "return", "self", ".", "_registry_file_reader", ".", "Open", "(", "path", ",", "ascii_codepage", "=", "self", ".", "_ascii_codepage", ")" ]
Temporarily mutes chatroom for a user
def do_mute ( self , sender , body , args ) : if sender . get ( 'MUTED' ) : self . send_message ( 'you are already muted' , sender ) else : self . broadcast ( '%s has muted this chatroom' % ( sender [ 'NICK' ] , ) ) sender [ 'QUEUED_MESSAGES' ] = [ ] sender [ 'MUTED' ] = True
10,698
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L153-L160
[ "def", "max_consecutive_days", "(", "self", ")", "->", "Optional", "[", "Tuple", "[", "int", ",", "Interval", "]", "]", ":", "if", "len", "(", "self", ".", "intervals", ")", "==", "0", ":", "return", "None", "startdate", "=", "self", ".", "start_date", "(", ")", "enddate", "=", "self", ".", "end_date", "(", ")", "seq", "=", "''", "ndays", "=", "(", "enddate", "-", "startdate", ")", ".", "days", "+", "1", "for", "i", "in", "range", "(", "ndays", ")", ":", "date", "=", "startdate", "+", "datetime", ".", "timedelta", "(", "days", "=", "i", ")", "wholeday", "=", "Interval", ".", "wholeday", "(", "date", ")", "if", "any", "(", "[", "x", ".", "overlaps", "(", "wholeday", ")", "for", "x", "in", "self", ".", "intervals", "]", ")", ":", "seq", "+=", "'+'", "else", ":", "seq", "+=", "' '", "# noinspection PyTypeChecker", "longest", "=", "max", "(", "seq", ".", "split", "(", ")", ",", "key", "=", "len", ")", "longest_len", "=", "len", "(", "longest", ")", "longest_idx", "=", "seq", ".", "index", "(", "longest", ")", "longest_interval", "=", "Interval", ".", "dayspan", "(", "startdate", "+", "datetime", ".", "timedelta", "(", "days", "=", "longest_idx", ")", ",", "startdate", "+", "datetime", ".", "timedelta", "(", "days", "=", "longest_idx", "+", "longest_len", ")", ")", "return", "longest_len", ",", "longest_interval" ]
Unmutes the chatroom for a user
def do_unmute(self, sender, body, args):
    """Unmute the chatroom for *sender*.

    If the sender was not muted they only get a private notice. Otherwise the
    mute flag is cleared, the room is told, every message queued while muted
    is delivered privately to the sender, and the queue is reset.
    """
    if not sender.get('MUTED'):
        self.send_message('you were not muted', sender)
        return
    sender['MUTED'] = False
    self.broadcast('%s has unmuted this chatroom' % (sender['NICK'],))
    # Flush everything captured while the sender was muted, then reset.
    for pending in sender.get('QUEUED_MESSAGES', []):
        self.send_message(pending, sender)
    sender['QUEUED_MESSAGES'] = []
10,699
https://github.com/mattlong/hermes/blob/63a5afcafe90ca99aeb44edeee9ed6f90baae431/hermes/chatroom.py#L162-L171
[ "def", "Run", "(", "self", ",", "args", ")", ":", "with", "vfs", ".", "VFSOpen", "(", "args", ".", "pathspec", ",", "progress_callback", "=", "self", ".", "Progress", ")", "as", "file_obj", ":", "fingerprinter", "=", "Fingerprinter", "(", "self", ".", "Progress", ",", "file_obj", ")", "response", "=", "rdf_client_action", ".", "FingerprintResponse", "(", ")", "response", ".", "pathspec", "=", "file_obj", ".", "pathspec", "if", "args", ".", "tuples", ":", "tuples", "=", "args", ".", "tuples", "else", ":", "# There are none selected -- we will cover everything", "tuples", "=", "list", "(", ")", "for", "k", "in", "self", ".", "_fingerprint_types", ":", "tuples", ".", "append", "(", "rdf_client_action", ".", "FingerprintTuple", "(", "fp_type", "=", "k", ")", ")", "for", "finger", "in", "tuples", ":", "hashers", "=", "[", "self", ".", "_hash_types", "[", "h", "]", "for", "h", "in", "finger", ".", "hashers", "]", "or", "None", "if", "finger", ".", "fp_type", "in", "self", ".", "_fingerprint_types", ":", "invoke", "=", "self", ".", "_fingerprint_types", "[", "finger", ".", "fp_type", "]", "res", "=", "invoke", "(", "fingerprinter", ",", "hashers", ")", "if", "res", ":", "response", ".", "matching_types", ".", "append", "(", "finger", ".", "fp_type", ")", "else", ":", "raise", "RuntimeError", "(", "\"Encountered unknown fingerprint type. %s\"", "%", "finger", ".", "fp_type", ")", "# Structure of the results is a list of dicts, each containing the", "# name of the hashing method, hashes for enabled hash algorithms,", "# and auxilliary data where present (e.g. 
signature blobs).", "# Also see Fingerprint:HashIt()", "response", ".", "results", "=", "fingerprinter", ".", "HashIt", "(", ")", "# We now return data in a more structured form.", "for", "result", "in", "response", ".", "results", ":", "if", "result", ".", "GetItem", "(", "\"name\"", ")", "==", "\"generic\"", ":", "for", "hash_type", "in", "[", "\"md5\"", ",", "\"sha1\"", ",", "\"sha256\"", "]", ":", "value", "=", "result", ".", "GetItem", "(", "hash_type", ")", "if", "value", "is", "not", "None", ":", "setattr", "(", "response", ".", "hash", ",", "hash_type", ",", "value", ")", "if", "result", "[", "\"name\"", "]", "==", "\"pecoff\"", ":", "for", "hash_type", "in", "[", "\"md5\"", ",", "\"sha1\"", ",", "\"sha256\"", "]", ":", "value", "=", "result", ".", "GetItem", "(", "hash_type", ")", "if", "value", ":", "setattr", "(", "response", ".", "hash", ",", "\"pecoff_\"", "+", "hash_type", ",", "value", ")", "signed_data", "=", "result", ".", "GetItem", "(", "\"SignedData\"", ",", "[", "]", ")", "for", "data", "in", "signed_data", ":", "response", ".", "hash", ".", "signed_data", ".", "Append", "(", "revision", "=", "data", "[", "0", "]", ",", "cert_type", "=", "data", "[", "1", "]", ",", "certificate", "=", "data", "[", "2", "]", ")", "self", ".", "SendReply", "(", "response", ")" ]