query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Commence macro recording .
def qteStartRecordingHook(self, msgObj):
    """Commence macro recording.

    If a recording is already in progress only a status message is
    emitted and nothing else happens.
    """
    # Guard: do not start a second recording.
    if self.qteRecording:
        self.qteMain.qteStatus('Macro recording already enabled')
        return

    # Update status flag.
    self.qteRecording = True

    # Reset the variables.
    self.qteMain.qteStatus('Macro recording started')
    self.recorded_keysequence = QtmacsKeysequence()

    # Connect the 'keypressed' and 'abort' signals.
    self.qteMain.qtesigKeyparsed.connect(self.qteKeyPress)
    self.qteMain.qtesigAbort.connect(self.qteStopRecordingHook)
7,900
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/qtmacsmain_macros.py#L254-L277
[ "def", "nurbs_to_bspline", "(", "obj", ",", "*", "*", "kwargs", ")", ":", "if", "not", "obj", ".", "rational", ":", "raise", "TypeError", "(", "\"The input must be a rational shape\"", ")", "# Get keyword arguments", "tol", "=", "kwargs", ".", "get", "(", "'tol'", ",", "10e-8", ")", "# Test for non-rational component extraction", "for", "w", "in", "obj", ".", "weights", ":", "if", "abs", "(", "w", "-", "1.0", ")", ">", "tol", ":", "print", "(", "\"Cannot extract non-rational components\"", ")", "return", "obj", "# NURBS -> B-Spline", "if", "isinstance", "(", "obj", ",", "NURBS", ".", "Curve", ")", ":", "return", "_convert", ".", "convert_curve", "(", "obj", ",", "BSpline", ")", "elif", "isinstance", "(", "obj", ",", "NURBS", ".", "Surface", ")", ":", "return", "_convert", ".", "convert_surface", "(", "obj", ",", "BSpline", ")", "elif", "isinstance", "(", "obj", ",", "NURBS", ".", "Volume", ")", ":", "return", "_convert", ".", "convert_volume", "(", "obj", ",", "BSpline", ")", "else", ":", "raise", "TypeError", "(", "\"Input must be an instance of NURBS curve, surface or volume\"", ")" ]
Stop macro recording .
def qteStopRecordingHook(self, msgObj):
    """Stop macro recording.

    A no-op when no recording is currently in progress.
    """
    # Nothing to do unless a recording is active.
    if not self.qteRecording:
        return

    # Update status flag and disconnect all signals.
    self.qteRecording = False
    self.qteMain.qteStatus('Macro recording stopped')
    self.qteMain.qtesigKeyparsed.disconnect(self.qteKeyPress)
    self.qteMain.qtesigAbort.disconnect(self.qteStopRecordingHook)
7,901
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/qtmacsmain_macros.py#L279-L291
[ "def", "defBoundary", "(", "self", ")", ":", "self", ".", "BoroCnstNatAll", "=", "np", ".", "zeros", "(", "self", ".", "StateCount", ")", "+", "np", ".", "nan", "# Find the natural borrowing constraint conditional on next period's state", "for", "j", "in", "range", "(", "self", ".", "StateCount", ")", ":", "PermShkMinNext", "=", "np", ".", "min", "(", "self", ".", "IncomeDstn_list", "[", "j", "]", "[", "1", "]", ")", "TranShkMinNext", "=", "np", ".", "min", "(", "self", ".", "IncomeDstn_list", "[", "j", "]", "[", "2", "]", ")", "self", ".", "BoroCnstNatAll", "[", "j", "]", "=", "(", "self", ".", "solution_next", ".", "mNrmMin", "[", "j", "]", "-", "TranShkMinNext", ")", "*", "(", "self", ".", "PermGroFac_list", "[", "j", "]", "*", "PermShkMinNext", ")", "/", "self", ".", "Rfree_list", "[", "j", "]", "self", ".", "BoroCnstNat_list", "=", "np", ".", "zeros", "(", "self", ".", "StateCount", ")", "+", "np", ".", "nan", "self", ".", "mNrmMin_list", "=", "np", ".", "zeros", "(", "self", ".", "StateCount", ")", "+", "np", ".", "nan", "self", ".", "BoroCnstDependency", "=", "np", ".", "zeros", "(", "(", "self", ".", "StateCount", ",", "self", ".", "StateCount", ")", ")", "+", "np", ".", "nan", "# The natural borrowing constraint in each current state is the *highest*", "# among next-state-conditional natural borrowing constraints that could", "# occur from this current state.", "for", "i", "in", "range", "(", "self", ".", "StateCount", ")", ":", "possible_next_states", "=", "self", ".", "MrkvArray", "[", "i", ",", ":", "]", ">", "0", "self", ".", "BoroCnstNat_list", "[", "i", "]", "=", "np", ".", "max", "(", "self", ".", "BoroCnstNatAll", "[", "possible_next_states", "]", ")", "# Explicitly handle the \"None\" case: ", "if", "self", ".", "BoroCnstArt", "is", "None", ":", "self", ".", "mNrmMin_list", "[", "i", "]", "=", "self", ".", "BoroCnstNat_list", "[", "i", "]", "else", ":", "self", ".", "mNrmMin_list", "[", "i", "]", "=", "np", ".", "max", "(", "[", "self", ".", 
"BoroCnstNat_list", "[", "i", "]", ",", "self", ".", "BoroCnstArt", "]", ")", "self", ".", "BoroCnstDependency", "[", "i", ",", ":", "]", "=", "self", ".", "BoroCnstNat_list", "[", "i", "]", "==", "self", ".", "BoroCnstNatAll" ]
Replay the macro sequence .
def qteReplayKeysequenceHook(self, msgObj):
    """Replay the recorded macro key sequence.

    Nothing happens when no key sequence has been recorded or a
    recording is still in progress.
    """
    # Quit if there is nothing to replay.
    if self.recorded_keysequence.toString() == '':
        return

    # Do not replay while a recording is in progress.
    if self.qteRecording:
        return

    # Simulate the key presses.
    self.qteMain.qteEmulateKeypresses(self.recorded_keysequence)
7,902
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/qtmacsmain_macros.py#L293-L307
[ "def", "adapt_files", "(", "solver", ")", ":", "print", "(", "\"adapting {0}'s files\"", ".", "format", "(", "solver", ")", ")", "root", "=", "os", ".", "path", ".", "join", "(", "'solvers'", ",", "solver", ")", "for", "arch", "in", "to_extract", "[", "solver", "]", ":", "arch", "=", "os", ".", "path", ".", "join", "(", "root", ",", "arch", ")", "extract_archive", "(", "arch", ",", "solver", ",", "put_inside", "=", "True", ")", "for", "fnames", "in", "to_move", "[", "solver", "]", ":", "old", "=", "os", ".", "path", ".", "join", "(", "root", ",", "fnames", "[", "0", "]", ")", "new", "=", "os", ".", "path", ".", "join", "(", "root", ",", "fnames", "[", "1", "]", ")", "os", ".", "rename", "(", "old", ",", "new", ")", "for", "f", "in", "to_remove", "[", "solver", "]", ":", "f", "=", "os", ".", "path", ".", "join", "(", "root", ",", "f", ")", "if", "os", ".", "path", ".", "isdir", "(", "f", ")", ":", "shutil", ".", "rmtree", "(", "f", ")", "else", ":", "os", ".", "remove", "(", "f", ")" ]
Record the key presses .
def qteKeyPress(self, msgObj):
    """Record the key presses delivered via ``msgObj``.

    If the parsed key sequence maps to this very macro the proxy is
    disabled; otherwise the originally intended macro is executed.
    """
    # Unpack the data structure.
    (srcObj, keysequence, macroName) = msgObj.data

    # Return immediately if the key sequence does not specify a
    # macro (yet).
    if macroName is None:
        return

    # If the macro to repeat is this very macro then disable the
    # macro proxy.
    if macroName == self.qteMacroName():
        self.abort()
        return

    # Execute the macro that would have run originally.
    msg = 'Executing macro {} through {}'.format(
        macroName, self.qteMacroName())
    self.qteMain.qteStatus(msg)
    self.qteMain.qteRunMacro(macroName, srcObj, keysequence)
7,903
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/qtmacsmain_macros.py#L460-L481
[ "def", "restore", "(", "self", ")", ":", "clean_beam", ",", "beam_params", "=", "beam_fit", "(", "self", ".", "psf_data", ",", "self", ".", "cdelt1", ",", "self", ".", "cdelt2", ")", "if", "np", ".", "all", "(", "np", ".", "array", "(", "self", ".", "psf_data_shape", ")", "==", "2", "*", "np", ".", "array", "(", "self", ".", "dirty_data_shape", ")", ")", ":", "self", ".", "restored", "=", "np", ".", "fft", ".", "fftshift", "(", "np", ".", "fft", ".", "irfft2", "(", "np", ".", "fft", ".", "rfft2", "(", "conv", ".", "pad_array", "(", "self", ".", "model", ")", ")", "*", "np", ".", "fft", ".", "rfft2", "(", "clean_beam", ")", ")", ")", "self", ".", "restored", "=", "self", ".", "restored", "[", "self", ".", "dirty_data_shape", "[", "0", "]", "/", "2", ":", "-", "self", ".", "dirty_data_shape", "[", "0", "]", "/", "2", ",", "self", ".", "dirty_data_shape", "[", "1", "]", "/", "2", ":", "-", "self", ".", "dirty_data_shape", "[", "1", "]", "/", "2", "]", "else", ":", "self", ".", "restored", "=", "np", ".", "fft", ".", "fftshift", "(", "np", ".", "fft", ".", "irfft2", "(", "np", ".", "fft", ".", "rfft2", "(", "self", ".", "model", ")", "*", "np", ".", "fft", ".", "rfft2", "(", "clean_beam", ")", ")", ")", "self", ".", "restored", "+=", "self", ".", "residual", "self", ".", "restored", "=", "self", ".", "restored", ".", "astype", "(", "np", ".", "float32", ")", "return", "beam_params" ]
Disconnect all signals and turn macro processing in the event handler back on .
def abort(self, msgObj):
    """Disconnect all signals and turn macro processing in the event
    handler back on."""
    main = self.qteMain
    main.qtesigKeyparsed.disconnect(self.qteKeyPress)
    main.qtesigAbort.disconnect(self.abort)
    self.qteActive = False
    main.qteEnableMacroProcessing()
7,904
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/qtmacsmain_macros.py#L483-L491
[ "def", "reserve_udp_port", "(", "self", ",", "port", ",", "project", ")", ":", "if", "port", "in", "self", ".", "_used_udp_ports", ":", "raise", "HTTPConflict", "(", "text", "=", "\"UDP port {} already in use on host {}\"", ".", "format", "(", "port", ",", "self", ".", "_console_host", ")", ")", "if", "port", "<", "self", ".", "_udp_port_range", "[", "0", "]", "or", "port", ">", "self", ".", "_udp_port_range", "[", "1", "]", ":", "raise", "HTTPConflict", "(", "text", "=", "\"UDP port {} is outside the range {}-{}\"", ".", "format", "(", "port", ",", "self", ".", "_udp_port_range", "[", "0", "]", ",", "self", ".", "_udp_port_range", "[", "1", "]", ")", ")", "self", ".", "_used_udp_ports", ".", "add", "(", "port", ")", "project", ".", "record_udp_port", "(", "port", ")", "log", ".", "debug", "(", "\"UDP port {} has been reserved\"", ".", "format", "(", "port", ")", ")" ]
Return a new ConciergeClient pulling secrets from the environment .
def get_new_client(request_session=False):
    """Return a new ConciergeClient pulling secrets from the environment.

    Reads ``MS_ACCESS_KEY``, ``MS_SECRET_KEY`` and ``MS_ASSOCIATION_ID``
    from the environment. When ``request_session`` is true a session is
    requested immediately.
    """
    from .client import ConciergeClient

    client = ConciergeClient(
        access_key=os.environ["MS_ACCESS_KEY"],
        secret_key=os.environ["MS_SECRET_KEY"],
        association_id=os.environ["MS_ASSOCIATION_ID"])
    if request_session:
        client.request_session()
    return client
7,905
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/utils.py#L44-L54
[ "def", "get_operation_mtf_dimension_names", "(", "self", ",", "operation_name", ")", ":", "mtf_dimension_names", "=", "set", "(", ")", "for", "tensor_name", "in", "self", ".", "get_operation_input_names", "(", "operation_name", ")", ":", "mtf_dimension_names", ".", "update", "(", "self", ".", "get_tensor_mtf_dimension_names", "(", "tensor_name", ")", ")", "for", "tensor_name", "in", "self", ".", "get_operation_output_names", "(", "operation_name", ")", ":", "mtf_dimension_names", ".", "update", "(", "self", ".", "get_tensor_mtf_dimension_names", "(", "tensor_name", ")", ")", "return", "mtf_dimension_names" ]
Submit object_query to MemberSuite returning . models . MemberSuiteObjects .
def submit_msql_object_query(object_query, client=None):
    """Submit ``object_query`` to MemberSuite, returning a list of
    models.MemberSuiteObjects.

    Raises ``NoResultsError`` when the query matches nothing and
    ``ExecuteMSQLError`` when MemberSuite reports a failure.
    """
    client = client or get_new_client()
    if not client.session_id:
        client.request_session()

    result = client.execute_object_query(object_query)
    execute_msql_result = result["body"]["ExecuteMSQLResult"]

    if not execute_msql_result["Success"]:
        # @TODO Fix - exposing only the first of possibly many errors here.
        raise ExecuteMSQLError(result=execute_msql_result)

    result_value = execute_msql_result["ResultValue"]
    membersuite_object_list = []

    if result_value["ObjectSearchResult"]["Objects"]:
        # Multiple results.
        for obj in (result_value["ObjectSearchResult"]["Objects"]
                    ["MemberSuiteObject"]):
            membersuite_object_list.append(membersuite_object_factory(obj))
    elif result_value["SingleObject"]["ClassType"]:
        # Only one result.
        membersuite_object_list.append(
            membersuite_object_factory(
                execute_msql_result["ResultValue"]["SingleObject"]))
    elif (result_value["ObjectSearchResult"]["Objects"] is None
          and result_value["SingleObject"]["ClassType"] is None):
        raise NoResultsError(result=execute_msql_result)

    return membersuite_object_list
7,906
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/utils.py#L57-L99
[ "def", "__ack_ok", "(", "self", ")", ":", "buf", "=", "self", ".", "__create_header", "(", "const", ".", "CMD_ACK_OK", ",", "b''", ",", "self", ".", "__session_id", ",", "const", ".", "USHRT_MAX", "-", "1", ")", "try", ":", "if", "self", ".", "tcp", ":", "top", "=", "self", ".", "__create_tcp_top", "(", "buf", ")", "self", ".", "__sock", ".", "send", "(", "top", ")", "else", ":", "self", ".", "__sock", ".", "sendto", "(", "buf", ",", "self", ".", "__address", ")", "except", "Exception", "as", "e", ":", "raise", "ZKNetworkError", "(", "str", "(", "e", ")", ")" ]
Return the value for key of membersuite_object_data .
def value_for_key(membersuite_object_data, key):
    """Return the value stored under ``key`` in the Fields of
    ``membersuite_object_data``.

    Raises KeyError if ``key`` is absent. When a key appears more
    than once the last occurrence wins.
    """
    pairs = membersuite_object_data["Fields"]["KeyValueOfstringanyType"]
    lookup = {pair['Key']: pair['Value'] for pair in pairs}
    return lookup[key]
7,907
https://github.com/AASHE/python-membersuite-api-client/blob/221f5ed8bc7d4424237a4669c5af9edc11819ee9/membersuite_api_client/utils.py#L102-L108
[ "def", "decompose_space", "(", "H", ",", "A", ")", ":", "return", "OperatorTrace", ".", "create", "(", "OperatorTrace", ".", "create", "(", "A", ",", "over_space", "=", "H", ".", "operands", "[", "-", "1", "]", ")", ",", "over_space", "=", "ProductSpace", ".", "create", "(", "*", "H", ".", "operands", "[", ":", "-", "1", "]", ")", ")" ]
Check if the format uses the creation time of the record .
def usesTime(self, fmt=None):
    """Check if the format uses the creation time of the record.

    ``fmt`` defaults to ``self._fmt``; a non-string format (e.g. a
    tuple of formats) is reduced to its first element before the
    '%(asctime)' marker is searched for.
    """
    if fmt is None:
        fmt = self._fmt
    # ``basestring`` was removed in Python 3 and raised NameError
    # here; ``str`` is the correct check on Python 3.
    if not isinstance(fmt, str):
        fmt = fmt[0]
    return fmt.find('%(asctime)') >= 0
7,908
https://github.com/OpenGov/og-python-utils/blob/00f44927383dd1bd6348f47302c4453d56963479/ogutils/loggers/flask.py#L20-L28
[ "def", "evaluate_rpn", "(", "rpn", ")", ":", "vals_stack", "=", "[", "]", "for", "item", "in", "rpn", ":", "if", "item", "in", "_ALL_OPS", ":", "# Apply the operator and push to the task.", "v2", "=", "vals_stack", ".", "pop", "(", ")", "if", "item", "in", "_UNARY_OPS", ":", "res", "=", "_UNARY_OPS", "[", "item", "]", "(", "v2", ")", "elif", "item", "in", "_BIN_OPS", ":", "v1", "=", "vals_stack", ".", "pop", "(", ")", "res", "=", "_BIN_OPS", "[", "item", "]", "(", "v1", ",", "v2", ")", "else", ":", "raise", "ValueError", "(", "\"%s not in unary_ops or bin_ops\"", "%", "str", "(", "item", ")", ")", "vals_stack", ".", "append", "(", "res", ")", "else", ":", "# Push the operand", "vals_stack", ".", "append", "(", "item", ")", "#print(vals_stack)", "assert", "len", "(", "vals_stack", ")", "==", "1", "assert", "isinstance", "(", "vals_stack", "[", "0", "]", ",", "bool", ")", "return", "vals_stack", "[", "0", "]" ]
Determine if a widget is part of Qtmacs widget hierarchy .
def qteIsQtmacsWidget(widgetObj):
    """Determine if a widget is part of the Qtmacs widget hierarchy.

    A widget belongs to Qtmacs if it, or any of its ancestors,
    carries a '_qteAdmin' attribute.
    """
    if widgetObj is None:
        return False
    if hasattr(widgetObj, '_qteAdmin'):
        return True

    # Walk up the parent chain until an ancestor features the
    # '_qteAdmin' attribute, the parent is None, or the parent was
    # already visited (avoids infinite loops).
    seen = [widgetObj]
    node = widgetObj.parent()
    while node not in seen:
        if hasattr(node, '_qteAdmin'):
            return True
        if node is None:
            return False
        seen.append(node)
        node = node.parent()
    return False
7,909
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1057-L1101
[ "def", "get_metadata", "(", "self", ",", "digest", ",", "content", ",", "mime_type", ")", ":", "# XXX: ad-hoc for now, refactor later", "if", "mime_type", ".", "startswith", "(", "\"image/\"", ")", ":", "img", "=", "Image", ".", "open", "(", "BytesIO", "(", "content", ")", ")", "ret", "=", "{", "}", "if", "not", "hasattr", "(", "img", ",", "\"_getexif\"", ")", ":", "return", "{", "}", "info", "=", "img", ".", "_getexif", "(", ")", "if", "not", "info", ":", "return", "{", "}", "for", "tag", ",", "value", "in", "info", ".", "items", "(", ")", ":", "decoded", "=", "TAGS", ".", "get", "(", "tag", ",", "tag", ")", "ret", "[", "\"EXIF:\"", "+", "str", "(", "decoded", ")", "]", "=", "value", "return", "ret", "else", ":", "if", "mime_type", "!=", "\"application/pdf\"", ":", "content", "=", "self", ".", "to_pdf", "(", "digest", ",", "content", ",", "mime_type", ")", "with", "make_temp_file", "(", "content", ")", "as", "in_fn", ":", "try", ":", "output", "=", "subprocess", ".", "check_output", "(", "[", "\"pdfinfo\"", ",", "in_fn", "]", ")", "except", "OSError", ":", "logger", ".", "error", "(", "\"Conversion failed, probably pdfinfo is not installed\"", ")", "raise", "ret", "=", "{", "}", "for", "line", "in", "output", ".", "split", "(", "b\"\\n\"", ")", ":", "if", "b\":\"", "in", "line", ":", "key", ",", "value", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "b\":\"", ",", "1", ")", "key", "=", "str", "(", "key", ")", "ret", "[", "\"PDF:\"", "+", "key", "]", "=", "str", "(", "value", ".", "strip", "(", ")", ",", "errors", "=", "\"replace\"", ")", "return", "ret" ]
Return the parent applet of widgetObj .
def qteGetAppletFromWidget(widgetObj):
    """Return the parent applet of ``widgetObj``.

    Returns None when the widget is None or does not belong to the
    Qtmacs hierarchy.
    """
    if widgetObj is None:
        return None
    if hasattr(widgetObj, '_qteAdmin'):
        return widgetObj._qteAdmin.qteApplet

    # Walk up the parent chain until an ancestor features the
    # '_qteAdmin' attribute, the parent is None, or the parent was
    # already visited (avoids infinite loops).
    seen = [widgetObj]
    node = widgetObj.parent()
    while node not in seen:
        if hasattr(node, '_qteAdmin'):
            return node._qteAdmin.qteApplet
        if node is None:
            return None
        seen.append(node)
        node = node.parent()
    return None
7,910
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1104-L1142
[ "def", "describe_topic", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "topics", "=", "list_topics", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "ret", "=", "{", "}", "for", "topic", ",", "arn", "in", "topics", ".", "items", "(", ")", ":", "if", "name", "in", "(", "topic", ",", "arn", ")", ":", "ret", "=", "{", "'TopicArn'", ":", "arn", "}", "ret", "[", "'Attributes'", "]", "=", "get_topic_attributes", "(", "arn", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "ret", "[", "'Subscriptions'", "]", "=", "list_subscriptions_by_topic", "(", "arn", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "# Grab extended attributes for the above subscriptions", "for", "sub", "in", "range", "(", "len", "(", "ret", "[", "'Subscriptions'", "]", ")", ")", ":", "sub_arn", "=", "ret", "[", "'Subscriptions'", "]", "[", "sub", "]", "[", "'SubscriptionArn'", "]", "if", "not", "sub_arn", ".", "startswith", "(", "'arn:aws:sns:'", ")", ":", "# Sometimes a sub is in e.g. PendingAccept or other", "# wierd states and doesn't have an ARN yet", "log", ".", "debug", "(", "'Subscription with invalid ARN %s skipped...'", ",", "sub_arn", ")", "continue", "deets", "=", "get_subscription_attributes", "(", "SubscriptionArn", "=", "sub_arn", ",", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "ret", "[", "'Subscriptions'", "]", "[", "sub", "]", ".", "update", "(", "deets", ")", "return", "ret" ]
Specify that the message will be delivered with the hook name .
def setHookName(self, name: str):
    """Specify that the message will be delivered with the hook ``name``."""
    self.messengerName = name
    self.isHook = True
7,911
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L86-L91
[ "def", "_scale_to_int", "(", "X", ",", "max_val", "=", "None", ")", ":", "if", "max_val", "is", "None", ":", "X", "=", "X", "/", "_gcd_array", "(", "X", ")", "else", ":", "X", "=", "X", "/", "max", "(", "1", "/", "max_val", ",", "_gcd_array", "(", "X", ")", ")", "return", "[", "int", "(", "entry", ")", "for", "entry", "in", "X", "]" ]
Specify that the message will be delivered with the signal name .
def setSignalName(self, name: str):
    """Specify that the message will be delivered with the signal ``name``."""
    self.messengerName = name
    self.isHook = False
7,912
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L94-L99
[ "def", "_create_auth", "(", "team", ",", "timeout", "=", "None", ")", ":", "url", "=", "get_registry_url", "(", "team", ")", "contents", "=", "_load_auth", "(", ")", "auth", "=", "contents", ".", "get", "(", "url", ")", "if", "auth", "is", "not", "None", ":", "# If the access token expires within a minute, update it.", "if", "auth", "[", "'expires_at'", "]", "<", "time", ".", "time", "(", ")", "+", "60", ":", "try", ":", "auth", "=", "_update_auth", "(", "team", ",", "auth", "[", "'refresh_token'", "]", ",", "timeout", ")", "except", "CommandException", "as", "ex", ":", "raise", "CommandException", "(", "\"Failed to update the access token (%s). Run `quilt login%s` again.\"", "%", "(", "ex", ",", "' '", "+", "team", "if", "team", "else", "''", ")", ")", "contents", "[", "url", "]", "=", "auth", "_save_auth", "(", "contents", ")", "return", "auth" ]
Set the policy on how Qtmacs filters keyboard events for a particular widget .
def qteSetKeyFilterPolicy(self, receiveBefore: bool = False,
                          useQtmacs: bool = None,
                          receiveAfter: bool = False):
    """Set the policy on how Qtmacs filters keyboard events for a
    particular widget.

    NOTE(review): ``useQtmacs`` is annotated ``bool`` yet defaults to
    None — confirm whether None is a deliberate tri-state value.
    """
    # Store key filter policy flags.
    self.receiveBeforeQtmacsParser = receiveBefore
    self.receiveAfterQtmacsParser = receiveAfter
    self.filterKeyEvents = useQtmacs
7,913
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L229-L287
[ "def", "_advance_cluster_time", "(", "self", ",", "cluster_time", ")", ":", "if", "self", ".", "_cluster_time", "is", "None", ":", "self", ".", "_cluster_time", "=", "cluster_time", "elif", "cluster_time", "is", "not", "None", ":", "if", "cluster_time", "[", "\"clusterTime\"", "]", ">", "self", ".", "_cluster_time", "[", "\"clusterTime\"", "]", ":", "self", ".", "_cluster_time", "=", "cluster_time" ]
Append another key to the key sequence represented by this object .
def appendQKeyEvent(self, keyEvent: QtGui.QKeyEvent):
    """Append another key to the key sequence represented by this
    object."""
    # Store the QKeyEvent.
    self.keylistKeyEvent.append(keyEvent)

    # Add the (modifier, key) pair to the list. The modifier is a
    # QFlag structure and must be typecast to an integer to avoid
    # difficulties with hashing in the ``match`` routine of the
    # ``QtmacsKeymap`` object.
    pair = (int(keyEvent.modifiers()), keyEvent.key())
    self.keylistQtConstants.append(pair)
7,914
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L694-L721
[ "def", "derivativeY", "(", "self", ",", "mLvl", ",", "pLvl", ",", "MedShk", ")", ":", "xLvl", "=", "self", ".", "xFunc", "(", "mLvl", ",", "pLvl", ",", "MedShk", ")", "dxdp", "=", "self", ".", "xFunc", ".", "derivativeY", "(", "mLvl", ",", "pLvl", ",", "MedShk", ")", "dcdx", "=", "self", ".", "cFunc", ".", "derivativeX", "(", "xLvl", ",", "MedShk", ")", "dcdp", "=", "dxdp", "*", "dcdx", "dMeddp", "=", "(", "dxdp", "-", "dcdp", ")", "/", "self", ".", "MedPrice", "return", "dcdp", ",", "dMeddp" ]
Insert a new key into the key map and associate it with a macro .
def qteInsertKey(self, keysequence: QtmacsKeysequence, macroName: str):
    """Insert a new key into the key map and associate it with a macro.

    Intermediate levels of the key-map tree are created on demand;
    any non-dict entry along the path (e.g. a previously installed
    macro) is overwritten.
    """
    # Get the key sequence as a list of tuples, where each tuple
    # contains the control modifier and the key code as Qt constants.
    keys = keysequence.toQtKeylist()

    # Walk (and, where necessary, build) the tree down to the level
    # that will hold the final key.
    node = self
    for key in keys[:-1]:
        # Create a fresh sub-dictionary if the key is missing or
        # currently references anything other than a dictionary.
        if not isinstance(node.get(key, None), dict):
            node[key] = {}
        node = node[key]

    # Assign the new macro object associated with this key.
    node[keys[-1]] = macroName
7,915
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L873-L922
[ "def", "_generate_examples_validation", "(", "self", ",", "archive", ",", "labels", ")", ":", "# Get the current random seeds.", "numpy_st0", "=", "np", ".", "random", ".", "get_state", "(", ")", "# Set new random seeds.", "np", ".", "random", ".", "seed", "(", "135", ")", "logging", ".", "warning", "(", "'Overwriting cv2 RNG seed.'", ")", "tfds", ".", "core", ".", "lazy_imports", ".", "cv2", ".", "setRNGSeed", "(", "357", ")", "for", "example", "in", "super", "(", "Imagenet2012Corrupted", ",", "self", ")", ".", "_generate_examples_validation", "(", "archive", ",", "labels", ")", ":", "with", "tf", ".", "Graph", "(", ")", ".", "as_default", "(", ")", ":", "tf_img", "=", "tf", ".", "image", ".", "decode_jpeg", "(", "example", "[", "'image'", "]", ".", "read", "(", ")", ",", "channels", "=", "3", ")", "image_np", "=", "tfds", ".", "as_numpy", "(", "tf_img", ")", "example", "[", "'image'", "]", "=", "self", ".", "_get_corrupted_example", "(", "image_np", ")", "yield", "example", "# Reset the seeds back to their original values.", "np", ".", "random", ".", "set_state", "(", "numpy_st0", ")" ]
Remove keysequence from this key map .
def qteRemoveKey(self, keysequence: QtmacsKeysequence):
    """Remove ``keysequence`` from this key map.

    After the leaf entry is removed, intermediate dictionaries along
    the prefix path that have become empty are pruned as well.
    Removing an unregistered key sequence is a no-op.
    """
    # Get the key sequence as a list of tuples, where each tuple
    # contains the control modifier and the key code as Qt constants.
    keys = keysequence.toQtKeylist()

    # ------------------------------------------------------------
    # Remove the leaf element from the tree.
    # ------------------------------------------------------------
    keyMap = self
    for key in keys[:-1]:
        # Quit if the key does not exist. This can happen if the
        # user tries to remove a key that was never registered.
        if key not in keyMap:
            return
        keyMap = keyMap[key]

    # The specified key sequence does not exist if the leaf element
    # (ie. last entry in the key sequence) is missing.
    if keys[-1] not in keyMap:
        return
    keyMap.pop(keys[-1])

    # ------------------------------------------------------------
    # Prune the prefix path and remove all empty dictionaries,
    # starting at the leaf level.
    #
    # BUGFIX: the original loop never shortened the remaining key
    # sequence and tested a stale loop variable, which raised
    # KeyError (or pruned the wrong entry) for sequences with more
    # than two keys.
    # ------------------------------------------------------------
    keys = keys[:-1]
    while keys:
        # Descend from the root to the parent of the current leaf.
        keyMap = self
        for key in keys[:-1]:
            keyMap = keyMap[key]

        # If the leaf is a non-empty dictionary then another key with
        # the same prefix still exists --> stop pruning.
        if keyMap[keys[-1]]:
            return
        keyMap.pop(keys[-1])
        keys = keys[:-1]
7,916
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L925-L999
[ "def", "_put_bucket_cors", "(", "self", ")", ":", "if", "self", ".", "s3props", "[", "'cors'", "]", "[", "'enabled'", "]", "and", "self", ".", "s3props", "[", "'website'", "]", "[", "'enabled'", "]", ":", "cors_config", "=", "{", "}", "cors_rules", "=", "[", "]", "for", "each_rule", "in", "self", ".", "s3props", "[", "'cors'", "]", "[", "'cors_rules'", "]", ":", "cors_rules", ".", "append", "(", "{", "'AllowedHeaders'", ":", "each_rule", "[", "'cors_headers'", "]", ",", "'AllowedMethods'", ":", "each_rule", "[", "'cors_methods'", "]", ",", "'AllowedOrigins'", ":", "each_rule", "[", "'cors_origins'", "]", ",", "'ExposeHeaders'", ":", "each_rule", "[", "'cors_expose_headers'", "]", ",", "'MaxAgeSeconds'", ":", "each_rule", "[", "'cors_max_age'", "]", "}", ")", "cors_config", "=", "{", "'CORSRules'", ":", "cors_rules", "}", "LOG", ".", "debug", "(", "cors_config", ")", "_response", "=", "self", ".", "s3client", ".", "put_bucket_cors", "(", "Bucket", "=", "self", ".", "bucket", ",", "CORSConfiguration", "=", "cors_config", ")", "else", ":", "_response", "=", "self", ".", "s3client", ".", "delete_bucket_cors", "(", "Bucket", "=", "self", ".", "bucket", ")", "LOG", ".", "debug", "(", "'Response setting up S3 CORS: %s'", ",", "_response", ")", "LOG", ".", "info", "(", "'S3 CORS configuration updated'", ")" ]
Look up the key sequence in key map .
def match(self, keysequence: QtmacsKeysequence):
    """Look up ``keysequence`` in this key map.

    Returns a ``(macro, valid)`` tuple: ``(None, False)`` when the
    sequence leads nowhere, ``(None, True)`` when it is a valid but
    still incomplete prefix, and ``(macro, True)`` when it resolves
    to a macro.
    """
    node = self
    for step in keysequence.toQtKeylist():
        try:
            node = node[step]
        except KeyError:
            # The keyboard sequence does not lead to any macro and
            # is therefore invalid.
            return (None, False)

    # At this point the key sequence entered so far exists. Either
    # it points at another sub-dictionary (sequence incomplete) or
    # at a macro object.
    if isinstance(node, dict):
        return (None, True)
    return (node, True)
7,917
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1002-L1050
[ "def", "initialize_dual", "(", "neural_net_params_object", ",", "init_dual_file", "=", "None", ",", "random_init_variance", "=", "0.01", ",", "init_nu", "=", "200.0", ")", ":", "lambda_pos", "=", "[", "]", "lambda_neg", "=", "[", "]", "lambda_quad", "=", "[", "]", "lambda_lu", "=", "[", "]", "if", "init_dual_file", "is", "None", ":", "for", "i", "in", "range", "(", "0", ",", "neural_net_params_object", ".", "num_hidden_layers", "+", "1", ")", ":", "initializer", "=", "(", "np", ".", "random", ".", "uniform", "(", "0", ",", "random_init_variance", ",", "size", "=", "(", "neural_net_params_object", ".", "sizes", "[", "i", "]", ",", "1", ")", ")", ")", ".", "astype", "(", "np", ".", "float32", ")", "lambda_pos", ".", "append", "(", "tf", ".", "get_variable", "(", "'lambda_pos_'", "+", "str", "(", "i", ")", ",", "initializer", "=", "initializer", ",", "dtype", "=", "tf", ".", "float32", ")", ")", "initializer", "=", "(", "np", ".", "random", ".", "uniform", "(", "0", ",", "random_init_variance", ",", "size", "=", "(", "neural_net_params_object", ".", "sizes", "[", "i", "]", ",", "1", ")", ")", ")", ".", "astype", "(", "np", ".", "float32", ")", "lambda_neg", ".", "append", "(", "tf", ".", "get_variable", "(", "'lambda_neg_'", "+", "str", "(", "i", ")", ",", "initializer", "=", "initializer", ",", "dtype", "=", "tf", ".", "float32", ")", ")", "initializer", "=", "(", "np", ".", "random", ".", "uniform", "(", "0", ",", "random_init_variance", ",", "size", "=", "(", "neural_net_params_object", ".", "sizes", "[", "i", "]", ",", "1", ")", ")", ")", ".", "astype", "(", "np", ".", "float32", ")", "lambda_quad", ".", "append", "(", "tf", ".", "get_variable", "(", "'lambda_quad_'", "+", "str", "(", "i", ")", ",", "initializer", "=", "initializer", ",", "dtype", "=", "tf", ".", "float32", ")", ")", "initializer", "=", "(", "np", ".", "random", ".", "uniform", "(", "0", ",", "random_init_variance", ",", "size", "=", "(", "neural_net_params_object", ".", "sizes", "[", "i", "]", 
",", "1", ")", ")", ")", ".", "astype", "(", "np", ".", "float32", ")", "lambda_lu", ".", "append", "(", "tf", ".", "get_variable", "(", "'lambda_lu_'", "+", "str", "(", "i", ")", ",", "initializer", "=", "initializer", ",", "dtype", "=", "tf", ".", "float32", ")", ")", "nu", "=", "tf", ".", "get_variable", "(", "'nu'", ",", "initializer", "=", "init_nu", ")", "else", ":", "# Loading from file", "dual_var_init_val", "=", "np", ".", "load", "(", "init_dual_file", ")", ".", "item", "(", ")", "for", "i", "in", "range", "(", "0", ",", "neural_net_params_object", ".", "num_hidden_layers", "+", "1", ")", ":", "lambda_pos", ".", "append", "(", "tf", ".", "get_variable", "(", "'lambda_pos_'", "+", "str", "(", "i", ")", ",", "initializer", "=", "dual_var_init_val", "[", "'lambda_pos'", "]", "[", "i", "]", ",", "dtype", "=", "tf", ".", "float32", ")", ")", "lambda_neg", ".", "append", "(", "tf", ".", "get_variable", "(", "'lambda_neg_'", "+", "str", "(", "i", ")", ",", "initializer", "=", "dual_var_init_val", "[", "'lambda_neg'", "]", "[", "i", "]", ",", "dtype", "=", "tf", ".", "float32", ")", ")", "lambda_quad", ".", "append", "(", "tf", ".", "get_variable", "(", "'lambda_quad_'", "+", "str", "(", "i", ")", ",", "initializer", "=", "dual_var_init_val", "[", "'lambda_quad'", "]", "[", "i", "]", ",", "dtype", "=", "tf", ".", "float32", ")", ")", "lambda_lu", ".", "append", "(", "tf", ".", "get_variable", "(", "'lambda_lu_'", "+", "str", "(", "i", ")", ",", "initializer", "=", "dual_var_init_val", "[", "'lambda_lu'", "]", "[", "i", "]", ",", "dtype", "=", "tf", ".", "float32", ")", ")", "nu", "=", "tf", ".", "get_variable", "(", "'nu'", ",", "initializer", "=", "1.0", "*", "dual_var_init_val", "[", "'nu'", "]", ")", "dual_var", "=", "{", "'lambda_pos'", ":", "lambda_pos", ",", "'lambda_neg'", ":", "lambda_neg", ",", "'lambda_quad'", ":", "lambda_quad", ",", "'lambda_lu'", ":", "lambda_lu", ",", "'nu'", ":", "nu", "}", "return", "dual_var" ]
Return an instance of a QLabel with the correct color scheme .
def _qteGetLabelInstance ( self ) : # Create a label with the proper colour appearance. layout = self . layout ( ) label = QtGui . QLabel ( self ) style = 'QLabel { background-color : white; color : blue; }' label . setStyleSheet ( style ) return label
7,918
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1190-L1211
[ "def", "_read_body_until_close", "(", "self", ",", "response", ",", "file", ")", ":", "_logger", ".", "debug", "(", "'Reading body until close.'", ")", "file_is_async", "=", "hasattr", "(", "file", ",", "'drain'", ")", "while", "True", ":", "data", "=", "yield", "from", "self", ".", "_connection", ".", "read", "(", "self", ".", "_read_size", ")", "if", "not", "data", ":", "break", "self", ".", "_data_event_dispatcher", ".", "notify_read", "(", "data", ")", "content_data", "=", "self", ".", "_decompress_data", "(", "data", ")", "if", "file", ":", "file", ".", "write", "(", "content_data", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")", "content_data", "=", "self", ".", "_flush_decompressor", "(", ")", "if", "file", ":", "file", ".", "write", "(", "content_data", ")", "if", "file_is_async", ":", "yield", "from", "file", ".", "drain", "(", ")" ]
Ensure all but the last QLabel are only as wide as necessary .
def _qteUpdateLabelWidths ( self ) : layout = self . layout ( ) # Remove all labels from the list and add them again in the # new order. for ii in range ( layout . count ( ) ) : label = layout . itemAt ( ii ) layout . removeItem ( label ) # Add all labels and ensure they have appropriate width. for item in self . _qteModeList : label = item [ 2 ] width = label . fontMetrics ( ) . size ( 0 , str ( item [ 1 ] ) ) . width ( ) label . setMaximumWidth ( width ) label . setMinimumWidth ( width ) layout . addWidget ( label ) # Remove the width constraint from the last label so that # it can expand to the right. _ , _ , label = self . _qteModeList [ - 1 ] label . setMaximumWidth ( 1600000 )
7,919
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1213-L1253
[ "def", "unsubscribe", "(", "self", ",", "subscription", ")", ":", "params", "=", "{", "'ContentType'", ":", "'JSON'", ",", "'SubscriptionArn'", ":", "subscription", "}", "response", "=", "self", ".", "make_request", "(", "'Unsubscribe'", ",", "params", ",", "'/'", ",", "'GET'", ")", "body", "=", "response", ".", "read", "(", ")", "if", "response", ".", "status", "==", "200", ":", "return", "json", ".", "loads", "(", "body", ")", "else", ":", "boto", ".", "log", ".", "error", "(", "'%s %s'", "%", "(", "response", ".", "status", ",", "response", ".", "reason", ")", ")", "boto", ".", "log", ".", "error", "(", "'%s'", "%", "body", ")", "raise", "self", ".", "ResponseError", "(", "response", ".", "status", ",", "response", ".", "reason", ",", "body", ")" ]
Return a tuple containing the mode its value and its associated QLabel instance .
def qteGetMode ( self , mode : str ) : for item in self . _qteModeList : if item [ 0 ] == mode : return item return None
7,920
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1256-L1276
[ "def", "get_salt", "(", "request", ")", ":", "try", ":", "username", "=", "request", ".", "POST", "[", "\"username\"", "]", "except", "KeyError", ":", "# log.error(\"No 'username' in POST data?!?\")", "return", "HttpResponseBadRequest", "(", ")", "try", ":", "request", ".", "server_challenge", "=", "request", ".", "session", "[", "SERVER_CHALLENGE_KEY", "]", "except", "KeyError", "as", "err", ":", "# log.error(\"Can't get challenge from session: %s\", err)", "return", "HttpResponseBadRequest", "(", ")", "# log.debug(\"old challenge: %r\", request.server_challenge)", "send_pseudo_salt", "=", "True", "form", "=", "UsernameForm", "(", "request", ",", "data", "=", "request", ".", "POST", ")", "if", "form", ".", "is_valid", "(", ")", ":", "send_pseudo_salt", "=", "False", "user_profile", "=", "form", ".", "user_profile", "init_pbkdf2_salt", "=", "user_profile", ".", "init_pbkdf2_salt", "if", "not", "init_pbkdf2_salt", ":", "# log.error(\"No init_pbkdf2_salt set in user profile!\")", "send_pseudo_salt", "=", "True", "if", "len", "(", "init_pbkdf2_salt", ")", "!=", "app_settings", ".", "PBKDF2_SALT_LENGTH", ":", "# log.error(\"Salt for user %r has wrong length: %r\" % (request.POST[\"username\"], init_pbkdf2_salt))", "send_pseudo_salt", "=", "True", "# else:", "# log.error(\"Salt Form is not valid: %r\", form.errors)", "if", "send_pseudo_salt", ":", "# log.debug(\"\\nUse pseudo salt!!!\")", "init_pbkdf2_salt", "=", "crypt", ".", "get_pseudo_salt", "(", "app_settings", ".", "PBKDF2_SALT_LENGTH", ",", "username", ")", "response", "=", "HttpResponse", "(", "init_pbkdf2_salt", ",", "content_type", "=", "\"text/plain\"", ")", "if", "not", "send_pseudo_salt", ":", "response", ".", "add_duration", "=", "True", "# collect duration time in @TimingAttackPreventer", "# log.debug(\"\\nsend init_pbkdf2_salt %r to client.\", init_pbkdf2_salt)", "return", "response" ]
Append label for mode and display value on it .
def qteAddMode ( self , mode : str , value ) : # Add the label to the layout and the local mode list. label = self . _qteGetLabelInstance ( ) label . setText ( value ) self . _qteModeList . append ( ( mode , value , label ) ) self . _qteUpdateLabelWidths ( )
7,921
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1279-L1300
[ "def", "extract_cookies_to_jar", "(", "jar", ",", "request", ",", "response", ")", ":", "if", "not", "(", "hasattr", "(", "response", ",", "'_original_response'", ")", "and", "response", ".", "_original_response", ")", ":", "return", "# the _original_response field is the wrapped httplib.HTTPResponse object,", "req", "=", "MockRequest", "(", "request", ")", "# pull out the HTTPMessage with the headers and put it in the mock:", "res", "=", "MockResponse", "(", "response", ".", "_original_response", ".", "msg", ")", "jar", ".", "extract_cookies", "(", "res", ",", "req", ")" ]
Change the value of mode to value .
def qteChangeModeValue ( self , mode : str , value ) : # Search through the list for ``mode``. for idx , item in enumerate ( self . _qteModeList ) : if item [ 0 ] == mode : # Update the displayed value in the label. label = item [ 2 ] label . setText ( value ) # Overwrite the old data record with the updated one # and adjust the widths of the modes. self . _qteModeList [ idx ] = ( mode , value , label ) self . _qteUpdateLabelWidths ( ) return True return False
7,922
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1303-L1336
[ "def", "deserialize_footer", "(", "stream", ",", "verifier", "=", "None", ")", ":", "_LOGGER", ".", "debug", "(", "\"Starting footer deserialization\"", ")", "signature", "=", "b\"\"", "if", "verifier", "is", "None", ":", "return", "MessageFooter", "(", "signature", "=", "signature", ")", "try", ":", "(", "sig_len", ",", ")", "=", "unpack_values", "(", "\">H\"", ",", "stream", ")", "(", "signature", ",", ")", "=", "unpack_values", "(", "\">{sig_len}s\"", ".", "format", "(", "sig_len", "=", "sig_len", ")", ",", "stream", ")", "except", "SerializationError", ":", "raise", "SerializationError", "(", "\"No signature found in message\"", ")", "if", "verifier", ":", "verifier", ".", "verify", "(", "signature", ")", "return", "MessageFooter", "(", "signature", "=", "signature", ")" ]
Insert mode at position pos .
def qteInsertMode ( self , pos : int , mode : str , value ) : # Add the label to the list. label = self . _qteGetLabelInstance ( ) label . setText ( value ) self . _qteModeList . insert ( pos , ( mode , value , label ) ) self . _qteUpdateLabelWidths ( )
7,923
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1339-L1365
[ "def", "delete_group", "(", "self", ",", "group_id", ",", "force", "=", "False", ")", ":", "params", "=", "{", "'force'", ":", "force", "}", "response", "=", "self", ".", "_do_request", "(", "'DELETE'", ",", "'/v2/groups/{group_id}'", ".", "format", "(", "group_id", "=", "group_id", ")", ",", "params", "=", "params", ")", "return", "response", ".", "json", "(", ")" ]
Remove mode and associated label .
def qteRemoveMode ( self , mode : str ) : # Search through the list for ``mode``. for idx , item in enumerate ( self . _qteModeList ) : if item [ 0 ] == mode : # Remove the record and delete the label. self . _qteModeList . remove ( item ) item [ 2 ] . hide ( ) item [ 2 ] . deleteLater ( ) self . _qteUpdateLabelWidths ( ) return True return False
7,924
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/auxiliary.py#L1368-L1398
[ "def", "start", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "is_running", "(", ")", ":", "self", ".", "websock_url", "=", "self", ".", "chrome", ".", "start", "(", "*", "*", "kwargs", ")", "self", ".", "websock", "=", "websocket", ".", "WebSocketApp", "(", "self", ".", "websock_url", ")", "self", ".", "websock_thread", "=", "WebsockReceiverThread", "(", "self", ".", "websock", ",", "name", "=", "'WebsockThread:%s'", "%", "self", ".", "chrome", ".", "port", ")", "self", ".", "websock_thread", ".", "start", "(", ")", "self", ".", "_wait_for", "(", "lambda", ":", "self", ".", "websock_thread", ".", "is_open", ",", "timeout", "=", "30", ")", "# tell browser to send us messages we're interested in", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Page.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Console.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'Runtime.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.enable'", ")", "self", ".", "send_to_chrome", "(", "method", "=", "'ServiceWorker.setForceUpdateOnPageLoad'", ")", "# disable google analytics", "self", ".", "send_to_chrome", "(", "method", "=", "'Network.setBlockedURLs'", ",", "params", "=", "{", "'urls'", ":", "[", "'*google-analytics.com/analytics.js'", ",", "'*google-analytics.com/ga.js'", "]", "}", ")" ]
Get the base and meta classes to use in creating a subclass .
def _get_bases ( type_ ) : # type: (type) -> Tuple[type, type] try : class _ ( type_ ) : # type: ignore """Check if type_ is subclassable.""" BaseClass = type_ except TypeError : BaseClass = object class MetaClass ( _ValidationMeta , BaseClass . __class__ ) : # type: ignore """Use the type_ meta and include base validation functionality.""" return BaseClass , MetaClass
7,925
https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/validation.py#L158-L180
[ "def", "transaction", "(", "self", ",", "collections", ",", "action", ",", "waitForSync", "=", "False", ",", "lockTimeout", "=", "None", ",", "params", "=", "None", ")", ":", "payload", "=", "{", "\"collections\"", ":", "collections", ",", "\"action\"", ":", "action", ",", "\"waitForSync\"", ":", "waitForSync", "}", "if", "lockTimeout", "is", "not", "None", ":", "payload", "[", "\"lockTimeout\"", "]", "=", "lockTimeout", "if", "params", "is", "not", "None", ":", "payload", "[", "\"params\"", "]", "=", "params", "self", ".", "connection", ".", "reportStart", "(", "action", ")", "r", "=", "self", ".", "connection", ".", "session", ".", "post", "(", "self", ".", "transactionURL", ",", "data", "=", "json", ".", "dumps", "(", "payload", ",", "default", "=", "str", ")", ")", "self", ".", "connection", ".", "reportItem", "(", ")", "data", "=", "r", ".", "json", "(", ")", "if", "(", "r", ".", "status_code", "==", "200", "or", "r", ".", "status_code", "==", "201", "or", "r", ".", "status_code", "==", "202", ")", "and", "not", "data", ".", "get", "(", "\"error\"", ")", ":", "return", "data", "else", ":", "raise", "TransactionError", "(", "data", "[", "\"errorMessage\"", "]", ",", "action", ",", "data", ")" ]
Instantiate the object if possible .
def _instantiate ( class_ , type_ , __value , * args , * * kwargs ) : try : return class_ ( __value , * args , * * kwargs ) except TypeError : try : return type_ ( __value , * args , * * kwargs ) except Exception : # pylint: disable=broad-except return __value
7,926
https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/validation.py#L183-L205
[ "def", "delete_topic_groups", "(", "self", ",", "group_id", ",", "topic_id", ")", ":", "path", "=", "{", "}", "data", "=", "{", "}", "params", "=", "{", "}", "# REQUIRED - PATH - group_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"group_id\"", "]", "=", "group_id", "# REQUIRED - PATH - topic_id\r", "\"\"\"ID\"\"\"", "path", "[", "\"topic_id\"", "]", "=", "topic_id", "self", ".", "logger", ".", "debug", "(", "\"DELETE /api/v1/groups/{group_id}/discussion_topics/{topic_id} with query params: {params} and form data: {data}\"", ".", "format", "(", "params", "=", "params", ",", "data", "=", "data", ",", "*", "*", "path", ")", ")", "return", "self", ".", "generic_request", "(", "\"DELETE\"", ",", "\"/api/v1/groups/{group_id}/discussion_topics/{topic_id}\"", ".", "format", "(", "*", "*", "path", ")", ",", "data", "=", "data", ",", "params", "=", "params", ",", "no_data", "=", "True", ")" ]
Get the full name of an object including the module .
def _get_fullname ( obj ) : # type: (Any) -> str if not hasattr ( obj , "__name__" ) : obj = obj . __class__ if obj . __module__ in ( "builtins" , "__builtin__" ) : return obj . __name__ return "{}.{}" . format ( obj . __module__ , obj . __name__ )
7,927
https://github.com/contains-io/typet/blob/ad5087c567af84db299eca186776e1cee228e442/typet/validation.py#L297-L311
[ "def", "port_ranges", "(", ")", ":", "try", ":", "return", "_linux_ranges", "(", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "# not linux, try BSD", "try", ":", "ranges", "=", "_bsd_ranges", "(", ")", "if", "ranges", ":", "return", "ranges", "except", "(", "OSError", ",", "IOError", ")", ":", "pass", "# fallback", "return", "[", "DEFAULT_EPHEMERAL_PORT_RANGE", "]" ]
Gets a value of key .
def get ( self , key , recursive = False , sorted = False , quorum = False , timeout = None ) : return self . adapter . get ( key , recursive = recursive , sorted = sorted , quorum = quorum , timeout = timeout )
7,928
https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/client.py#L29-L33
[ "def", "rest_verbs", "(", "http_method_names", "=", "None", ")", ":", "http_method_names", "=", "[", "'GET'", "]", "if", "(", "http_method_names", "is", "None", ")", "else", "http_method_names", "def", "decorator", "(", "func", ")", ":", "WrappedRestView", "=", "type", "(", "six", ".", "PY3", "and", "'WrappedRestView'", "or", "b'WrappedRestView'", ",", "(", "RestView", ",", ")", ",", "{", "'__doc__'", ":", "func", ".", "__doc__", "}", ")", "# Note, the above allows us to set the docstring.", "# It is the equivalent of:", "#", "# class WrappedRestView(RestView):", "# pass", "# WrappedRestView.__doc__ = func.doc <--- Not possible to do this", "# api_view applied without (method_names)", "assert", "not", "(", "isinstance", "(", "http_method_names", ",", "types", ".", "FunctionType", ")", ")", ",", "'@api_view missing list of allowed HTTP methods'", "# api_view applied with eg. string instead of list of strings", "assert", "isinstance", "(", "http_method_names", ",", "(", "list", ",", "tuple", ")", ")", ",", "'@api_view expected a list of strings, received %s'", "%", "type", "(", "http_method_names", ")", ".", "__name__", "allowed_methods", "=", "set", "(", "http_method_names", ")", "|", "set", "(", "(", "'options'", ",", ")", ")", "WrappedRestView", ".", "http_method_names", "=", "[", "method", ".", "lower", "(", ")", "for", "method", "in", "allowed_methods", "]", "def", "handler", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "for", "method", "in", "http_method_names", ":", "setattr", "(", "WrappedRestView", ",", "method", ".", "lower", "(", ")", ",", "handler", ")", "WrappedRestView", ".", "__name__", "=", "func", ".", "__name__", "WrappedRestView", ".", "renderer_classes", "=", "getattr", "(", "func", ",", "'renderer_classes'", ",", "RestView", ".", "renderer_classes", ")", "WrappedRestView", ".", "parser_classes", "=", "getattr", "(", "func", ",", "'parser_classes'", ",", 
"RestView", ".", "parser_classes", ")", "WrappedRestView", ".", "authentication_classes", "=", "getattr", "(", "func", ",", "'authentication_classes'", ",", "RestView", ".", "authentication_classes", ")", "WrappedRestView", ".", "throttle_classes", "=", "getattr", "(", "func", ",", "'throttle_classes'", ",", "RestView", ".", "throttle_classes", ")", "WrappedRestView", ".", "permission_classes", "=", "getattr", "(", "func", ",", "'permission_classes'", ",", "RestView", ".", "permission_classes", ")", "return", "WrappedRestView", ".", "as_view", "(", ")", "return", "decorator" ]
Waits until a node changes .
def wait ( self , key , index = 0 , recursive = False , sorted = False , quorum = False , timeout = None ) : return self . adapter . get ( key , recursive = recursive , sorted = sorted , quorum = quorum , wait = True , wait_index = index , timeout = timeout )
7,929
https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/client.py#L35-L40
[ "def", "read_avro", "(", "file_path_or_buffer", ",", "schema", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "file_path_or_buffer", ",", "six", ".", "string_types", ")", ":", "with", "open", "(", "file_path_or_buffer", ",", "'rb'", ")", "as", "f", ":", "return", "__file_to_dataframe", "(", "f", ",", "schema", ",", "*", "*", "kwargs", ")", "else", ":", "return", "__file_to_dataframe", "(", "file_path_or_buffer", ",", "schema", ",", "*", "*", "kwargs", ")" ]
Sets only a TTL of a key . The waiters doesn t receive notification by this operation .
def refresh ( self , key , ttl , prev_value = None , prev_index = None , timeout = None ) : return self . adapter . set ( key , ttl = ttl , refresh = True , prev_value = prev_value , prev_index = prev_index , timeout = timeout )
7,930
https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/client.py#L49-L56
[ "def", "_read_atlas_zonefile", "(", "zonefile_path", ",", "zonefile_hash", ")", ":", "with", "open", "(", "zonefile_path", ",", "\"rb\"", ")", "as", "f", ":", "data", "=", "f", ".", "read", "(", ")", "# sanity check ", "if", "zonefile_hash", "is", "not", "None", ":", "if", "not", "verify_zonefile", "(", "data", ",", "zonefile_hash", ")", ":", "log", ".", "debug", "(", "\"Corrupt zonefile '%s'\"", "%", "zonefile_hash", ")", "return", "None", "return", "data" ]
Creates a new key .
def create ( self , key , value = None , dir = False , ttl = None , timeout = None ) : return self . adapter . set ( key , value , dir = dir , ttl = ttl , prev_exist = False , timeout = timeout )
7,931
https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/client.py#L58-L61
[ "def", "oauth_error_handler", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# OAuthErrors should not happen, so they are not caught here. Hence", "# they will result in a 500 Internal Server Error which is what we", "# are interested in.", "try", ":", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "OAuthClientError", "as", "e", ":", "current_app", ".", "logger", ".", "warning", "(", "e", ".", "message", ",", "exc_info", "=", "True", ")", "return", "oauth2_handle_error", "(", "e", ".", "remote", ",", "e", ".", "response", ",", "e", ".", "code", ",", "e", ".", "uri", ",", "e", ".", "description", ")", "except", "OAuthCERNRejectedAccountError", "as", "e", ":", "current_app", ".", "logger", ".", "warning", "(", "e", ".", "message", ",", "exc_info", "=", "True", ")", "flash", "(", "_", "(", "'CERN account not allowed.'", ")", ",", "category", "=", "'danger'", ")", "return", "redirect", "(", "'/'", ")", "except", "OAuthRejectedRequestError", ":", "flash", "(", "_", "(", "'You rejected the authentication request.'", ")", ",", "category", "=", "'info'", ")", "return", "redirect", "(", "'/'", ")", "except", "AlreadyLinkedError", ":", "flash", "(", "_", "(", "'External service is already linked to another account.'", ")", ",", "category", "=", "'danger'", ")", "return", "redirect", "(", "url_for", "(", "'invenio_oauthclient_settings.index'", ")", ")", "return", "inner" ]
Updates an existing key .
def update ( self , key , value = None , dir = False , ttl = None , refresh = False , prev_value = None , prev_index = None , timeout = None ) : return self . adapter . set ( key , value , dir = dir , ttl = ttl , refresh = refresh , prev_value = prev_value , prev_index = prev_index , prev_exist = True , timeout = timeout )
7,932
https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/client.py#L63-L68
[ "def", "onboarding_message", "(", "*", "*", "payload", ")", ":", "# Get WebClient so you can communicate back to Slack.", "web_client", "=", "payload", "[", "\"web_client\"", "]", "# Get the id of the Slack user associated with the incoming event", "user_id", "=", "payload", "[", "\"data\"", "]", "[", "\"user\"", "]", "[", "\"id\"", "]", "# Open a DM with the new user.", "response", "=", "web_client", ".", "im_open", "(", "user_id", ")", "channel", "=", "response", "[", "\"channel\"", "]", "[", "\"id\"", "]", "# Post the onboarding message.", "start_onboarding", "(", "web_client", ",", "user_id", ",", "channel", ")" ]
Creates a new automatically increasing key in the given directory key .
def append ( self , key , value = None , dir = False , ttl = None , timeout = None ) : return self . adapter . append ( key , value , dir = dir , ttl = ttl , timeout = timeout )
7,933
https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/client.py#L70-L75
[ "def", "prettyprint", "(", "datastr", ")", ":", "maxwidth", "=", "WPToolsQuery", ".", "MAXWIDTH", "rpad", "=", "WPToolsQuery", ".", "RPAD", "extent", "=", "maxwidth", "-", "(", "rpad", "+", "2", ")", "for", "line", "in", "datastr", ":", "if", "len", "(", "line", ")", ">=", "maxwidth", ":", "line", "=", "line", "[", ":", "extent", "]", "+", "'...'", "utils", ".", "stderr", "(", "line", ")" ]
Deletes a key .
def delete ( self , key , dir = False , recursive = False , prev_value = None , prev_index = None , timeout = None ) : return self . adapter . delete ( key , dir = dir , recursive = recursive , prev_value = prev_value , prev_index = prev_index , timeout = timeout )
7,934
https://github.com/sublee/etc/blob/f2be64604da5af0d7739cfacf36f55712f0fc5cb/etc/client.py#L77-L82
[ "def", "get_closest_sibling_state", "(", "state_m", ",", "from_logical_port", "=", "None", ")", ":", "if", "not", "state_m", ".", "parent", ":", "logger", ".", "warning", "(", "\"A state can not have a closest sibling state if it has not parent as {0}\"", ".", "format", "(", "state_m", ")", ")", "return", "margin", "=", "cal_margin", "(", "state_m", ".", "parent", ".", "get_meta_data_editor", "(", ")", "[", "'size'", "]", ")", "pos", "=", "state_m", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", "size", "=", "state_m", ".", "get_meta_data_editor", "(", ")", "[", "'size'", "]", "# otherwise measure from reference state itself", "if", "from_logical_port", "in", "[", "\"outcome\"", ",", "\"income\"", "]", ":", "size", "=", "(", "margin", ",", "margin", ")", "if", "from_logical_port", "==", "\"outcome\"", ":", "outcomes_m", "=", "[", "outcome_m", "for", "outcome_m", "in", "state_m", ".", "outcomes", "if", "outcome_m", ".", "outcome", ".", "outcome_id", ">=", "0", "]", "free_outcomes_m", "=", "[", "oc_m", "for", "oc_m", "in", "outcomes_m", "if", "not", "state_m", ".", "state", ".", "parent", ".", "get_transition_for_outcome", "(", "state_m", ".", "state", ",", "oc_m", ".", "outcome", ")", "]", "if", "free_outcomes_m", ":", "outcome_m", "=", "free_outcomes_m", "[", "0", "]", "else", ":", "outcome_m", "=", "outcomes_m", "[", "0", "]", "pos", "=", "add_pos", "(", "pos", ",", "outcome_m", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", ")", "elif", "from_logical_port", "==", "\"income\"", ":", "pos", "=", "add_pos", "(", "pos", ",", "state_m", ".", "income", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", ")", "min_distance", "=", "None", "for", "sibling_state_m", "in", "state_m", ".", "parent", ".", "states", ".", "values", "(", ")", ":", "if", "sibling_state_m", "is", "state_m", ":", "continue", "sibling_pos", "=", "sibling_state_m", ".", "get_meta_data_editor", "(", ")", "[", "'rel_pos'", "]", "sibling_size", "=", 
"sibling_state_m", ".", "get_meta_data_editor", "(", ")", "[", "'size'", "]", "distance", "=", "geometry", ".", "cal_dist_between_2_coord_frame_aligned_boxes", "(", "pos", ",", "size", ",", "sibling_pos", ",", "sibling_size", ")", "if", "not", "min_distance", "or", "min_distance", "[", "0", "]", ">", "distance", ":", "min_distance", "=", "(", "distance", ",", "sibling_state_m", ")", "return", "min_distance" ]
finds spike containers in a multi spike containers collection
def find ( self , cell_designation , cell_filter = lambda x , c : 'c' in x and x [ 'c' ] == c ) : res = [ i for i , sc in enumerate ( self . spike_containers ) if cell_filter ( sc . meta , cell_designation ) ] if len ( res ) > 0 : return res [ 0 ]
7,935
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L145-L151
[ "def", "align_options", "(", "options", ")", ":", "l", "=", "0", "for", "opt", "in", "options", ":", "if", "len", "(", "opt", "[", "0", "]", ")", ">", "l", ":", "l", "=", "len", "(", "opt", "[", "0", "]", ")", "s", "=", "[", "]", "for", "opt", "in", "options", ":", "s", ".", "append", "(", "' {0}{1} {2}'", ".", "format", "(", "opt", "[", "0", "]", ",", "' '", "*", "(", "l", "-", "len", "(", "opt", "[", "0", "]", ")", ")", ",", "opt", "[", "1", "]", ")", ")", "return", "'\\n'", ".", "join", "(", "s", ")" ]
Calculates the length of the Label Dimension from its minimum maximum and wether it is discrete .
def len ( self , resolution = 1.0 , units = None , conversion_function = convert_time , end_at_end = True ) : if units is not None : resolution = conversion_function ( resolution , from_units = units , to_units = self . units ) else : units = self . units if self . min is None : return int ( self . max / resolution ) if self . max is None : return 0 if units != '1' and end_at_end : return int ( np . ceil ( ( self . max - self . min ) / resolution ) ) return int ( np . ceil ( ( self . max - self . min ) / resolution ) + 1 )
7,936
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L282-L301
[ "def", "_handle_log_rotations", "(", "self", ")", ":", "for", "h", "in", "self", ".", "capture_handlers", ":", "if", "self", ".", "_should_rotate_log", "(", "h", ")", ":", "self", ".", "_rotate_log", "(", "h", ")" ]
bins overwrites resolution
def logspace ( self , bins = None , units = None , conversion_function = convert_time , resolution = None , end_at_end = True ) : if type ( bins ) in [ list , np . ndarray ] : return bins min = conversion_function ( self . min , from_units = self . units , to_units = units ) max = conversion_function ( self . max , from_units = self . units , to_units = units ) if units is None : units = self . units if resolution is None : resolution = 1.0 if bins is None : bins = self . len ( resolution = resolution , units = units , conversion_function = conversion_function ) # + 1 if units != '1' and end_at_end : # continuous variable behaviour: # we end with the last valid value at the outer edge return np . logspace ( np . log10 ( min ) , np . log10 ( max ) , bins + 1 ) [ : - 1 ] # discrete variable behaviour: # we end with the last valid value as its own bin return np . logspace ( np . log10 ( min ) , np . log10 ( max ) , bins )
7,937
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L363-L381
[ "def", "_generateForOAuthSecurity", "(", "self", ",", "client_id", ",", "secret_id", ",", "token_url", "=", "None", ")", ":", "grant_type", "=", "\"client_credentials\"", "if", "token_url", "is", "None", ":", "token_url", "=", "\"https://www.arcgis.com/sharing/rest/oauth2/token\"", "params", "=", "{", "\"client_id\"", ":", "client_id", ",", "\"client_secret\"", ":", "secret_id", ",", "\"grant_type\"", ":", "grant_type", ",", "\"f\"", ":", "\"json\"", "}", "token", "=", "self", ".", "_post", "(", "url", "=", "token_url", ",", "param_dict", "=", "params", ",", "securityHandler", "=", "None", ",", "proxy_port", "=", "self", ".", "_proxy_port", ",", "proxy_url", "=", "self", ".", "_proxy_url", ")", "if", "'access_token'", "in", "token", ":", "self", ".", "_token", "=", "token", "[", "'access_token'", "]", "self", ".", "_expires_in", "=", "token", "[", "'expires_in'", "]", "self", ".", "_token_created_on", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "self", ".", "_token_expires_on", "=", "self", ".", "_token_created_on", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "int", "(", "token", "[", "'expires_in'", "]", ")", ")", "self", ".", "_valid", "=", "True", "self", ".", "_message", "=", "\"Token Generated\"", "else", ":", "self", ".", "_token", "=", "None", "self", ".", "_expires_in", "=", "None", "self", ".", "_token_created_on", "=", "None", "self", ".", "_token_expires_on", "=", "None", "self", ".", "_valid", "=", "False", "self", ".", "_message", "=", "token" ]
Creates a list of dictionaries which each give a constraint for a certain section of the dimension .
def constraint_range_dict ( self , * args , * * kwargs ) : bins = self . bins ( * args , * * kwargs ) return [ { self . name + '__gte' : a , self . name + '__lt' : b } for a , b in zip ( bins [ : - 1 ] , bins [ 1 : ] ) ] space = self . space ( * args , * * kwargs ) resolution = space [ 1 ] - space [ 0 ] return [ { self . name + '__gte' : s , self . name + '__lt' : s + resolution } for s in space ]
7,938
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L392-L403
[ "def", "remove_user_from_group", "(", "self", ",", "username", ",", "group_name", ")", ":", "log", ".", "warning", "(", "'Removing user from a group...'", ")", "url", "=", "'rest/api/2/group/user'", "params", "=", "{", "'groupname'", ":", "group_name", ",", "'username'", ":", "username", "}", "return", "self", ".", "delete", "(", "url", ",", "params", "=", "params", ")" ]
Takes a string or a function to find a set of label indizes that match . If the string starts with a ~ the label only has to contain the string .
def find_labels ( self , key , find_in_name = True , find_in_units = False ) : if type ( key ) is str : found_keys = [ ] if key . startswith ( '~' ) : for label_no , label in enumerate ( self . labels ) : if find_in_name and key [ 1 : ] in label . name : found_keys . append ( label_no ) if find_in_units and key [ 1 : ] in label . units : found_keys . append ( label_no ) else : for label_no , label in enumerate ( self . labels ) : if find_in_name and key == label . name : return [ label_no ] if find_in_units and key == label . units : return [ label_no ] return found_keys if hasattr ( key , '__call__' ) : found_keys = [ ] for label_no , label in enumerate ( self . labels ) : if key ( label ) : found_keys . append ( label_no ) return found_keys if type ( key ) is int : return [ key ] if key < self . matrix . shape [ 1 ] else [ ] return [ key ]
7,939
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L462-L491
[ "def", "on_exception", "(", "self", ",", "exception", ")", ":", "logger", ".", "error", "(", "'Exception from stream!'", ",", "exc_info", "=", "True", ")", "self", ".", "streaming_exception", "=", "exception" ]
converts a dimension in place
def convert ( self , label , units = None , conversion_function = convert_time ) : label_no = self . get_label_no ( label ) new_label , new_column = self . get_converted ( label_no , units , conversion_function ) labels = [ LabelDimension ( l ) for l in self . labels ] labels [ label_no ] = new_label matrix = self . matrix . copy ( ) matrix [ : , label_no ] = new_column return LabeledMatrix ( matrix , labels )
7,940
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L597-L605
[ "def", "available_files", "(", "url", ")", ":", "req", "=", "requests", ".", "get", "(", "url", ")", "if", "req", ".", "status_code", "!=", "200", ":", "raise", "base", ".", "FailedDownloadException", "(", "'Failed to download data (status {}) from {}!'", ".", "format", "(", "req", ".", "status_code", ",", "url", ")", ")", "page_content", "=", "req", ".", "text", "link_pattern", "=", "re", ".", "compile", "(", "r'<a href=\"(.*?)\">(.*?)</a>'", ")", "available_files", "=", "[", "]", "for", "match", "in", "link_pattern", ".", "findall", "(", "page_content", ")", ":", "if", "match", "[", "0", "]", ".", "endswith", "(", "'.tgz'", ")", ":", "available_files", ".", "append", "(", "os", ".", "path", ".", "join", "(", "url", ",", "match", "[", "0", "]", ")", ")", "return", "available_files" ]
returns labels which have updated minima and maxima depending on the kwargs supplied to this
def _get_constrained_labels ( self , remove_dimensions = False , * * kwargs ) : new_labels = [ ] for label_no , label in enumerate ( self . labels ) : new_label = LabelDimension ( label ) remove = False for k in kwargs : if k == label . name : new_label . max = kwargs [ k ] new_label . min = kwargs [ k ] remove = True if k == label . name + '__lt' : if new_label . units == '1' : new_label . max = np . min ( [ new_label . max , kwargs [ k ] - 1 ] ) # is this right? else : new_label . max = np . min ( [ new_label . max , kwargs [ k ] ] ) #remove = True if k == label . name + '__lte' : new_label . max = np . min ( [ new_label . max , kwargs [ k ] ] ) #remove = True if k == label . name + '__gt' : if new_label . units == '1' : new_label . min = np . max ( [ new_label . min , kwargs [ k ] + 1 ] ) else : new_label . min = np . max ( [ new_label . min , kwargs [ k ] ] ) #remove = True if k == label . name + '__gte' : new_label . min = np . max ( [ new_label . min , kwargs [ k ] ] ) #remove = True if k == label . name + '__evals' : remove = True if remove_dimensions : if remove : # skipping removed labels continue new_labels . append ( new_label ) return new_labels
7,941
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L606-L645
[ "def", "split_volume_from_journal", "(", "citation_elements", ")", ":", "for", "el", "in", "citation_elements", ":", "if", "el", "[", "'type'", "]", "==", "'JOURNAL'", "and", "';'", "in", "el", "[", "'title'", "]", ":", "el", "[", "'title'", "]", ",", "series", "=", "el", "[", "'title'", "]", ".", "rsplit", "(", "';'", ",", "1", ")", "el", "[", "'volume'", "]", "=", "series", "+", "el", "[", "'volume'", "]", "return", "citation_elements" ]
Inplace method that adds meta information to the meta dictionary
def store_meta ( self , meta ) : if self . meta is None : self . meta = { } self . meta . update ( meta ) return self
7,942
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L879-L884
[ "def", "get_last_components_by_type", "(", "component_types", ",", "topic_id", ",", "db_conn", "=", "None", ")", ":", "db_conn", "=", "db_conn", "or", "flask", ".", "g", ".", "db_conn", "schedule_components_ids", "=", "[", "]", "for", "ct", "in", "component_types", ":", "where_clause", "=", "sql", ".", "and_", "(", "models", ".", "COMPONENTS", ".", "c", ".", "type", "==", "ct", ",", "models", ".", "COMPONENTS", ".", "c", ".", "topic_id", "==", "topic_id", ",", "models", ".", "COMPONENTS", ".", "c", ".", "export_control", "==", "True", ",", "models", ".", "COMPONENTS", ".", "c", ".", "state", "==", "'active'", ")", "# noqa", "query", "=", "(", "sql", ".", "select", "(", "[", "models", ".", "COMPONENTS", ".", "c", ".", "id", "]", ")", ".", "where", "(", "where_clause", ")", ".", "order_by", "(", "sql", ".", "desc", "(", "models", ".", "COMPONENTS", ".", "c", ".", "created_at", ")", ")", ")", "cmpt_id", "=", "db_conn", ".", "execute", "(", "query", ")", ".", "fetchone", "(", ")", "if", "cmpt_id", "is", "None", ":", "msg", "=", "'Component of type \"%s\" not found or not exported.'", "%", "ct", "raise", "dci_exc", ".", "DCIException", "(", "msg", ",", "status_code", "=", "412", ")", "cmpt_id", "=", "cmpt_id", "[", "0", "]", "if", "cmpt_id", "in", "schedule_components_ids", ":", "msg", "=", "(", "'Component types %s malformed: type %s duplicated.'", "%", "(", "component_types", ",", "ct", ")", ")", "raise", "dci_exc", ".", "DCIException", "(", "msg", ",", "status_code", "=", "412", ")", "schedule_components_ids", ".", "append", "(", "cmpt_id", ")", "return", "schedule_components_ids" ]
finds spike containers in multi spike containers collection offspring
def find ( self , cell_designation , cell_filter = lambda x , c : 'c' in x and x [ 'c' ] == c ) : if 'parent' in self . meta : return ( self . meta [ 'parent' ] , self . meta [ 'parent' ] . find ( cell_designation , cell_filter = cell_filter ) )
7,943
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L885-L890
[ "def", "_ParseWtmp", "(", ")", ":", "users", "=", "{", "}", "wtmp_struct_size", "=", "UtmpStruct", ".", "GetSize", "(", ")", "filenames", "=", "glob", ".", "glob", "(", "\"/var/log/wtmp*\"", ")", "+", "[", "\"/var/run/utmp\"", "]", "for", "filename", "in", "filenames", ":", "try", ":", "wtmp", "=", "open", "(", "filename", ",", "\"rb\"", ")", ".", "read", "(", ")", "except", "IOError", ":", "continue", "for", "offset", "in", "range", "(", "0", ",", "len", "(", "wtmp", ")", ",", "wtmp_struct_size", ")", ":", "try", ":", "record", "=", "UtmpStruct", "(", "wtmp", "[", "offset", ":", "offset", "+", "wtmp_struct_size", "]", ")", "except", "utils", ".", "ParsingError", ":", "break", "# Users only appear for USER_PROCESS events, others are system.", "if", "record", ".", "ut_type", "!=", "7", ":", "continue", "try", ":", "if", "users", "[", "record", ".", "ut_user", "]", "<", "record", ".", "tv_sec", ":", "users", "[", "record", ".", "ut_user", "]", "=", "record", ".", "tv_sec", "except", "KeyError", ":", "users", "[", "record", ".", "ut_user", "]", "=", "record", ".", "tv_sec", "return", "users" ]
returns the Inter Spike Intervals
def ISIs ( self , time_dimension = 0 , units = None , min_t = None , max_t = None ) : units = self . _default_units ( units ) converted_dimension , st = self . spike_times . get_converted ( time_dimension , units ) if min_t is None : min_t = converted_dimension . min if max_t is None : max_t = converted_dimension . max return np . diff ( sorted ( st [ ( st > min_t ) * ( st < max_t ) ] ) )
7,944
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L1076-L1090
[ "def", "parse_journal", "(", "journal", ")", ":", "events", "=", "[", "e", "for", "e", "in", "journal", "if", "not", "isinstance", "(", "e", ",", "CorruptedUsnRecord", ")", "]", "keyfunc", "=", "lambda", "e", ":", "str", "(", "e", ".", "file_reference_number", ")", "+", "e", ".", "file_name", "+", "e", ".", "timestamp", "event_groups", "=", "(", "tuple", "(", "g", ")", "for", "k", ",", "g", "in", "groupby", "(", "events", ",", "key", "=", "keyfunc", ")", ")", "if", "len", "(", "events", ")", "<", "len", "(", "list", "(", "journal", ")", ")", ":", "LOGGER", ".", "debug", "(", "\"Corrupted records in UsnJrnl, some events might be missing.\"", ")", "return", "[", "journal_event", "(", "g", ")", "for", "g", "in", "event_groups", "]" ]
Outputs a time histogram of spikes .
def temporal_firing_rate ( self , time_dimension = 0 , resolution = 1.0 , units = None , min_t = None , max_t = None , weight_function = None , normalize_time = False , normalize_n = False , start_units_with_0 = True , cell_dimension = 'N' ) : units = self . _default_units ( units ) if self . data_format == 'spike_times' : converted_dimension , st = self . spike_times . get_converted ( 0 , units ) if min_t is None : min_t = converted_dimension . min if max_t is None : max_t = converted_dimension . max st = st [ ( st >= min_t ) * ( st < max_t ) ] bins = converted_dimension . linspace_by_resolution ( resolution , end_at_end = True , extra_bins = 0 ) H , edg = np . histogram ( st , bins = bins ) if normalize_time : H = H / ( convert_time ( resolution , from_units = units , to_units = 's' ) ) # make it Hertz if normalize_n : H = H / ( len ( np . unique ( self . spike_times [ cell_dimension ] ) ) ) return H , edg
7,945
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L1201-L1232
[ "def", "remove_server", "(", "self", ",", "server_id", ")", ":", "# Validate server_id", "server", "=", "self", ".", "_get_server", "(", "server_id", ")", "# Delete any instances we recorded to be cleaned up", "if", "server_id", "in", "self", ".", "_owned_subscriptions", ":", "inst_list", "=", "self", ".", "_owned_subscriptions", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_subscriptions", "[", "server_id", "]", "if", "server_id", "in", "self", ".", "_owned_filters", ":", "inst_list", "=", "self", ".", "_owned_filters", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_filters", "[", "server_id", "]", "if", "server_id", "in", "self", ".", "_owned_destinations", ":", "inst_list", "=", "self", ".", "_owned_destinations", "[", "server_id", "]", "# We iterate backwards because we change the list", "for", "i", "in", "six", ".", "moves", ".", "range", "(", "len", "(", "inst_list", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "inst", "=", "inst_list", "[", "i", "]", "server", ".", "conn", ".", "DeleteInstance", "(", "inst", ".", "path", ")", "del", "inst_list", "[", "i", "]", "del", "self", ".", "_owned_destinations", "[", "server_id", "]", "# Remove server from this listener", "del", "self", ".", "_servers", "[", "server_id", "]" ]
Plots a firing rate plot .
def plot_temporal_firing_rate ( self , time_dimension = 0 , resolution = 1.0 , units = None , min_t = None , max_t = None , weight_function = None , normalize_time = False , normalize_n = False , start_units_with_0 = True , cell_dimension = 'N' , * * kwargs ) : if bool ( self ) : import matplotlib . pylab as plt H , ed = self . temporal_firing_rate ( time_dimension = time_dimension , resolution = resolution , units = units , min_t = min_t , max_t = max_t , weight_function = weight_function , normalize_time = normalize_time , normalize_n = normalize_n , start_units_with_0 = start_units_with_0 , cell_dimension = cell_dimension ) plt . plot ( ed [ 1 : ] , H , * * kwargs )
7,946
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L1235-L1245
[ "def", "dump", "(", "self", ")", ":", "assert", "self", ".", "database", "is", "not", "None", "cmd", "=", "\"SELECT count from {} WHERE rowid={}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "STATE_INFO_ROW", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "self", ".", "_from_sqlite", "(", "ret", "[", "0", "]", "[", "0", "]", ")", "+", "self", ".", "inserts", "if", "count", ">", "self", ".", "row_limit", ":", "msg", "=", "\"cleaning up state, this might take a while.\"", "logger", ".", "warning", "(", "msg", ")", "delete", "=", "count", "-", "self", ".", "row_limit", "delete", "+=", "int", "(", "self", ".", "row_limit", "*", "(", "self", ".", "row_cleanup_quota", "/", "100.0", ")", ")", "cmd", "=", "(", "\"DELETE FROM {} WHERE timestamp IN (\"", "\"SELECT timestamp FROM {} ORDER BY timestamp ASC LIMIT {});\"", ")", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ",", "self", ".", "STATE_TABLE", ",", "delete", ")", ")", "self", ".", "_vacuum", "(", ")", "cmd", "=", "\"SELECT COUNT(*) FROM {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "ret", "[", "0", "]", "[", "0", "]", "cmd", "=", "\"UPDATE {} SET count = {} WHERE rowid = {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "_to_sqlite", "(", "count", ")", ",", "self", ".", "STATE_INFO_ROW", ",", ")", ")", "self", ".", "_update_cache_directory_state", "(", ")", "self", ".", "database", ".", "commit", "(", ")", "self", ".", "cursor", ".", "close", "(", ")", "self", ".", "database", ".", "close", "(", ")", 
"self", ".", "database", "=", "None", "self", ".", "cursor", "=", "None", "self", ".", "inserts", "=", "0" ]
Returns the units of a Dimension
def get_units ( self , * args , * * kwargs ) : if len ( args ) == 1 : return self . spike_times . get_label ( args [ 0 ] ) . units return [ self . spike_times . get_label ( a ) . units for a in args ]
7,947
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L1381-L1387
[ "def", "security_pkg", "(", "self", ",", "pkg", ")", ":", "print", "(", "\"\"", ")", "self", ".", "template", "(", "78", ")", "print", "(", "\"| {0}{1}*** WARNING ***{2}\"", ")", ".", "format", "(", "\" \"", "*", "27", ",", "self", ".", "meta", ".", "color", "[", "\"RED\"", "]", ",", "self", ".", "meta", ".", "color", "[", "\"ENDC\"", "]", ")", "self", ".", "template", "(", "78", ")", "print", "(", "\"| Before proceed with the package '{0}' will you must read\\n\"", "\"| the README file. You can use the command \"", "\"'slpkg -n {1}'\"", ")", ".", "format", "(", "pkg", ",", "pkg", ")", "self", ".", "template", "(", "78", ")", "print", "(", "\"\"", ")" ]
Returns the minimum of a Dimension
def get_min ( self , * args , * * kwargs ) : if len ( args ) == 1 : return self . spike_times . get_label ( args [ 0 ] ) . min return [ self . spike_times . get_label ( a ) . max for a in args ]
7,948
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L1388-L1396
[ "def", "disconnect_pv_clients", "(", "self", ",", "mris", ")", ":", "# type: (List[str]) -> None", "for", "mri", "in", "mris", ":", "for", "pv", "in", "self", ".", "_pvs", ".", "pop", "(", "mri", ",", "{", "}", ")", ".", "values", "(", ")", ":", "# Close pv with force destroy on, this will call", "# onLastDisconnect", "pv", ".", "close", "(", "destroy", "=", "True", ",", "sync", "=", "True", ",", "timeout", "=", "1.0", ")" ]
Returns the maximum of a Dimension
def get_max ( self , * args , * * kwargs ) : if len ( args ) == 1 : return self . spike_times . get_label ( args [ 0 ] ) . max return [ self . spike_times . get_label ( a ) . max for a in args ]
7,949
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L1397-L1405
[ "def", "apply_noise", "(", "data", ",", "noise", ")", ":", "if", "noise", ">=", "1", ":", "noise", "=", "noise", "/", "100.", "for", "i", "in", "range", "(", "data", ".", "nRows", "(", ")", ")", ":", "ones", "=", "data", ".", "rowNonZeros", "(", "i", ")", "[", "0", "]", "replace_indices", "=", "numpy", ".", "random", ".", "choice", "(", "ones", ",", "size", "=", "int", "(", "len", "(", "ones", ")", "*", "noise", ")", ",", "replace", "=", "False", ")", "for", "index", "in", "replace_indices", ":", "data", "[", "i", ",", "index", "]", "=", "0", "new_indices", "=", "numpy", ".", "random", ".", "choice", "(", "data", ".", "nCols", "(", ")", ",", "size", "=", "int", "(", "len", "(", "ones", ")", "*", "noise", ")", ",", "replace", "=", "False", ")", "for", "index", "in", "new_indices", ":", "while", "data", "[", "i", ",", "index", "]", "==", "1", ":", "index", "=", "numpy", ".", "random", ".", "randint", "(", "0", ",", "data", ".", "nCols", "(", ")", ")", "data", "[", "i", ",", "index", "]", "=", "1" ]
Like linspace but shifts the space to create edges for histograms .
def linspace_bins ( self , dim , * args , * * kwargs ) : return self . spike_times . get_label ( dim ) . linspace_bins ( * args , * * kwargs )
7,950
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L1432-L1436
[ "def", "await_metadata_by_name", "(", "self", ",", "name", ",", "metadata_key", ",", "timeout", ",", "caster", "=", "None", ")", ":", "file_path", "=", "self", ".", "_metadata_file_path", "(", "name", ",", "metadata_key", ")", "self", ".", "_wait_for_file", "(", "file_path", ",", "timeout", "=", "timeout", ")", "return", "self", ".", "read_metadata_by_name", "(", "name", ",", "metadata_key", ",", "caster", ")" ]
Creates a brian 2 create_SpikeGeneratorGroup object that contains the spikes in this container .
def create_SpikeGeneratorGroup ( self , time_label = 0 , index_label = 1 , reorder_indices = False , index_offset = True ) : import brian2 spike_times = self . spike_times . convert ( time_label , 's' ) [ time_label ] * brian2 . second indices = [ 0 ] * len ( spike_times ) if len ( self . spike_times . find_labels ( index_label ) ) : indices = self . spike_times [ index_label ] if index_offset is not False : if index_offset is True : indices = indices - self . spike_times . get_label ( index_label ) . min else : indices = indices - index_offset N = np . max ( indices ) else : N = self . spike_times . get_label ( index_label ) . max if reorder_indices : indices_levels = np . sort ( np . unique ( indices ) ) . tolist ( ) indices = np . array ( [ indices_levels . index ( i ) for i in indices ] ) N = len ( indices_levels ) return brian2 . SpikeGeneratorGroup ( N + 1 , indices = indices , times = spike_times )
7,951
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L1495-L1524
[ "def", "clean_field_dict", "(", "field_dict", ",", "cleaner", "=", "str", ".", "strip", ",", "time_zone", "=", "None", ")", ":", "d", "=", "{", "}", "if", "time_zone", "is", "None", ":", "tz", "=", "DEFAULT_TZ", "for", "k", ",", "v", "in", "viewitems", "(", "field_dict", ")", ":", "if", "k", "==", "'_state'", ":", "continue", "if", "isinstance", "(", "v", ",", "basestring", ")", ":", "d", "[", "k", "]", "=", "cleaner", "(", "str", "(", "v", ")", ")", "elif", "isinstance", "(", "v", ",", "(", "datetime", ".", "datetime", ",", "datetime", ".", "date", ")", ")", ":", "d", "[", "k", "]", "=", "tz", ".", "localize", "(", "v", ")", "else", ":", "d", "[", "k", "]", "=", "v", "return", "d" ]
Returns a neo Segment containing the spike trains .
def to_neo ( self , index_label = 'N' , time_label = 0 , name = 'segment of exported spikes' , index = 0 ) : import neo from quantities import s seq = neo . Segment ( name = name , index = index ) t_start = None t_stop = None if self . min_t is not None : t_start = convert_time ( self . min_t , from_units = self . units , to_units = 's' ) * s if self . max_t is not None : t_stop = convert_time ( self . max_t , from_units = self . units , to_units = 's' ) * s for train in self . generate ( index_label ) : seq . spiketrains . append ( neo . SpikeTrain ( train . spike_times . get_converted ( time_label , 's' ) [ 1 ] * s , t_start = t_start , t_stop = t_stop ) ) return seq
7,952
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/spikes.py#L1525-L1556
[ "def", "set_archive_layout", "(", "self", ",", "archive_id", ",", "layout_type", ",", "stylesheet", "=", "None", ")", ":", "payload", "=", "{", "'type'", ":", "layout_type", ",", "}", "if", "layout_type", "==", "'custom'", ":", "if", "stylesheet", "is", "not", "None", ":", "payload", "[", "'stylesheet'", "]", "=", "stylesheet", "endpoint", "=", "self", ".", "endpoints", ".", "set_archive_layout_url", "(", "archive_id", ")", "response", "=", "requests", ".", "put", "(", "endpoint", ",", "data", "=", "json", ".", "dumps", "(", "payload", ")", ",", "headers", "=", "self", ".", "json_headers", "(", ")", ",", "proxies", "=", "self", ".", "proxies", ",", "timeout", "=", "self", ".", "timeout", ")", "if", "response", ".", "status_code", "==", "200", ":", "pass", "elif", "response", ".", "status_code", "==", "400", ":", "raise", "ArchiveError", "(", "'Invalid request. This response may indicate that data in your request data is invalid JSON. It may also indicate that you passed in invalid layout options.'", ")", "elif", "response", ".", "status_code", "==", "403", ":", "raise", "AuthError", "(", "'Authentication error.'", ")", "else", ":", "raise", "RequestError", "(", "'OpenTok server error.'", ",", "response", ".", "status_code", ")" ]
Update the modification status in the mode bar .
def qteModificationChanged ( self , mod ) : if mod : s = '*' else : s = '-' self . _qteModeBar . qteChangeModeValue ( 'MODIFIED' , s )
7,953
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/applets/scieditor.py#L214-L225
[ "def", "synthesize", "(", "self", ",", "text", ",", "voice", "=", "None", ",", "customization_id", "=", "None", ",", "accept", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "text", "is", "None", ":", "raise", "ValueError", "(", "'text must be provided'", ")", "headers", "=", "{", "'Accept'", ":", "accept", "}", "if", "'headers'", "in", "kwargs", ":", "headers", ".", "update", "(", "kwargs", ".", "get", "(", "'headers'", ")", ")", "sdk_headers", "=", "get_sdk_headers", "(", "'text_to_speech'", ",", "'V1'", ",", "'synthesize'", ")", "headers", ".", "update", "(", "sdk_headers", ")", "params", "=", "{", "'voice'", ":", "voice", ",", "'customization_id'", ":", "customization_id", "}", "data", "=", "{", "'text'", ":", "text", "}", "url", "=", "'/v1/synthesize'", "response", "=", "self", ".", "request", "(", "method", "=", "'POST'", ",", "url", "=", "url", ",", "headers", "=", "headers", ",", "params", "=", "params", ",", "json", "=", "data", ",", "accept_json", "=", "False", ")", "return", "response" ]
Display the file fileName .
def loadFile ( self , fileName ) : self . fileName = fileName # Assign QFile object with the current name. self . file = QtCore . QFile ( fileName ) if self . file . exists ( ) : # Load the file into the widget and reset the undo stack # to delete the undo object create by the setText method. # Without it, an undo operation would delete the content # of the widget which is intuitive. self . qteScintilla . setText ( open ( fileName ) . read ( ) ) self . qteScintilla . qteUndoStack . reset ( ) else : msg = "File <b>{}</b> does not exist" . format ( self . qteAppletID ( ) ) self . qteLogger . info ( msg )
7,954
https://github.com/olitheolix/qtmacs/blob/36253b082b82590f183fe154b053eb3a1e741be2/qtmacs/applets/scieditor.py#L239-L256
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
essentially the generalization of arg if arg else default
def conv ( arg , default = None , func = None ) : if func : return func ( arg ) if arg else default else : return arg if arg else default
7,955
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L10-L23
[ "def", "_PrintSessionsOverview", "(", "self", ",", "storage_reader", ")", ":", "table_view", "=", "views", ".", "ViewsFactory", ".", "GetTableView", "(", "self", ".", "_views_format_type", ",", "title", "=", "'Sessions'", ")", "for", "session", "in", "storage_reader", ".", "GetSessions", "(", ")", ":", "start_time", "=", "timelib", ".", "Timestamp", ".", "CopyToIsoFormat", "(", "session", ".", "start_time", ")", "session_identifier", "=", "uuid", ".", "UUID", "(", "hex", "=", "session", ".", "identifier", ")", "session_identifier", "=", "'{0!s}'", ".", "format", "(", "session_identifier", ")", "table_view", ".", "AddRow", "(", "[", "session_identifier", ",", "start_time", "]", ")", "table_view", ".", "Write", "(", "self", ".", "_output_writer", ")" ]
quick pickle dump similar to np . save
def dump_pickle ( name , obj ) : with open ( name , "wb" ) as f : pickle . dump ( obj , f , 2 ) pass
7,956
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L36-L40
[ "def", "delete", "(", "self", ",", "event", ")", ":", "super", "(", "CeleryReceiver", ",", "self", ")", ".", "delete", "(", "event", ")", "AsyncResult", "(", "event", ".", "id", ")", ".", "revoke", "(", "terminate", "=", "True", ")" ]
chunk l in n sized bits
def chunks ( l , n ) : #http://stackoverflow.com/a/3226719 #...not that this is hard to understand. return [ l [ x : x + n ] for x in range ( 0 , len ( l ) , n ) ]
7,957
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L48-L52
[ "async", "def", "_wait_exponentially", "(", "self", ",", "exception", ",", "max_wait_time", "=", "300", ")", ":", "wait_time", "=", "min", "(", "(", "2", "**", "self", ".", "_connection_attempts", ")", "+", "random", ".", "random", "(", ")", ",", "max_wait_time", ")", "try", ":", "wait_time", "=", "exception", ".", "response", "[", "\"headers\"", "]", "[", "\"Retry-After\"", "]", "except", "(", "KeyError", ",", "AttributeError", ")", ":", "pass", "self", ".", "_logger", ".", "debug", "(", "\"Waiting %s seconds before reconnecting.\"", ",", "wait_time", ")", "await", "asyncio", ".", "sleep", "(", "float", "(", "wait_time", ")", ")" ]
checked verbose printing
def check_vprint ( s , vprinter ) : if vprinter is True : print ( s ) elif callable ( vprinter ) : vprinter ( s )
7,958
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L54-L59
[ "def", "load_stl_ascii", "(", "file_obj", ")", ":", "# the first line is the header", "header", "=", "file_obj", ".", "readline", "(", ")", "# make sure header is a string, not bytes", "if", "hasattr", "(", "header", ",", "'decode'", ")", ":", "try", ":", "header", "=", "header", ".", "decode", "(", "'utf-8'", ")", "except", "BaseException", ":", "header", "=", "''", "# save header to metadata", "metadata", "=", "{", "'header'", ":", "header", "}", "# read all text into one string", "text", "=", "file_obj", ".", "read", "(", ")", "# convert bytes to string", "if", "hasattr", "(", "text", ",", "'decode'", ")", ":", "text", "=", "text", ".", "decode", "(", "'utf-8'", ")", "# split by endsolid keyword", "text", "=", "text", ".", "lower", "(", ")", ".", "split", "(", "'endsolid'", ")", "[", "0", "]", "# create array of splits", "blob", "=", "np", ".", "array", "(", "text", ".", "strip", "(", ")", ".", "split", "(", ")", ")", "# there are 21 'words' in each face", "face_len", "=", "21", "# length of blob should be multiple of face_len", "if", "(", "len", "(", "blob", ")", "%", "face_len", ")", "!=", "0", ":", "raise", "HeaderError", "(", "'Incorrect length STL file!'", ")", "face_count", "=", "int", "(", "len", "(", "blob", ")", "/", "face_len", ")", "# this offset is to be added to a fixed set of tiled indices", "offset", "=", "face_len", "*", "np", ".", "arange", "(", "face_count", ")", ".", "reshape", "(", "(", "-", "1", ",", "1", ")", ")", "normal_index", "=", "np", ".", "tile", "(", "[", "2", ",", "3", ",", "4", "]", ",", "(", "face_count", ",", "1", ")", ")", "+", "offset", "vertex_index", "=", "np", ".", "tile", "(", "[", "8", ",", "9", ",", "10", ",", "12", ",", "13", ",", "14", ",", "16", ",", "17", ",", "18", "]", ",", "(", "face_count", ",", "1", ")", ")", "+", "offset", "# faces are groups of three sequential vertices", "faces", "=", "np", ".", "arange", "(", "face_count", "*", "3", ")", ".", "reshape", "(", "(", "-", "1", ",", "3", ")", ")", 
"face_normals", "=", "blob", "[", "normal_index", "]", ".", "astype", "(", "'<f8'", ")", "vertices", "=", "blob", "[", "vertex_index", ".", "reshape", "(", "(", "-", "1", ",", "3", ")", ")", "]", ".", "astype", "(", "'<f8'", ")", "return", "{", "'vertices'", ":", "vertices", ",", "'faces'", ":", "faces", ",", "'metadata'", ":", "metadata", ",", "'face_normals'", ":", "face_normals", "}" ]
read lines from a file into lines ... optional strip
def filelines ( fname , strip = False ) : with open ( fname , 'r' ) as f : lines = f . readlines ( ) if strip : lines [ : ] = [ line . strip ( ) for line in lines ] return lines
7,959
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L69-L75
[ "def", "updateSeriesRegistrationStatus", "(", ")", ":", "from", ".", "models", "import", "Series", "if", "not", "getConstant", "(", "'general__enableCronTasks'", ")", ":", "return", "logger", ".", "info", "(", "'Checking status of Series that are open for registration.'", ")", "open_series", "=", "Series", ".", "objects", ".", "filter", "(", ")", ".", "filter", "(", "*", "*", "{", "'registrationOpen'", ":", "True", "}", ")", "for", "series", "in", "open_series", ":", "series", ".", "updateRegistrationStatus", "(", ")" ]
parse a string into a list of a uniform type
def parse_utuple ( s , urx , length = 2 ) : if type ( urx ) != str : urx = urx . pattern if length is not None and length < 1 : raise ValueError ( "invalid length: {}" . format ( length ) ) if length == 1 : rx = r"^ *\( *{urx} *,? *\) *$" . format ( urx = urx ) elif length is None : rx = r"^ *\( *(?:{urx} *, *)*{urx} *,? *\) *$" . format ( urx = urx ) else : rx = r"^ *\( *(?:{urx} *, *){{{rep1}}}{urx} *,? *\) *$" . format ( rep1 = length - 1 , urx = urx ) return re . match ( rx , s )
7,960
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L90-L104
[ "def", "_compute_ogg_page_crc", "(", "page", ")", ":", "page_zero_crc", "=", "page", "[", ":", "OGG_FIRST_PAGE_HEADER_CRC_OFFSET", "]", "+", "b\"\\00\"", "*", "OGG_FIRST_PAGE_HEADER_CRC", ".", "size", "+", "page", "[", "OGG_FIRST_PAGE_HEADER_CRC_OFFSET", "+", "OGG_FIRST_PAGE_HEADER_CRC", ".", "size", ":", "]", "return", "ogg_page_crc", "(", "page_zero_crc", ")" ]
parse a string into a list of numbers of a type
def parse_numtuple ( s , intype , length = 2 , scale = 1 ) : if intype == int : numrx = intrx_s elif intype == float : numrx = fltrx_s else : raise NotImplementedError ( "Not implemented for type: {}" . format ( intype ) ) if parse_utuple ( s , numrx , length = length ) is None : raise ValueError ( "{} is not a valid number tuple." . format ( s ) ) return [ x * scale for x in evalt ( s ) ]
7,961
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L110-L121
[ "def", "_read_para_rvs_hmac", "(", "self", ",", "code", ",", "cbit", ",", "clen", ",", "*", ",", "desc", ",", "length", ",", "version", ")", ":", "_hmac", "=", "self", ".", "_read_fileng", "(", "clen", ")", "rvs_hmac", "=", "dict", "(", "type", "=", "desc", ",", "critical", "=", "cbit", ",", "length", "=", "clen", ",", "hmac", "=", "_hmac", ",", ")", "_plen", "=", "length", "-", "clen", "if", "_plen", ":", "self", ".", "_read_fileng", "(", "_plen", ")", "return", "rvs_hmac" ]
parse a string of acceptable colors into matplotlib that is either strings or three tuples of rgb . Don t quote strings .
def parse_ctuple ( s , length = 2 ) : if parse_utuple ( s , colrx_s , length = length ) is None : raise ValueError ( "{} is not a valid color tuple." . format ( s ) ) #quote strings s = quote_subs ( s , colorfix = True ) return evalt ( s )
7,962
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L130-L138
[ "def", "_GetLoadConfigTimestamp", "(", "self", ",", "pefile_object", ")", ":", "if", "not", "hasattr", "(", "pefile_object", ",", "'DIRECTORY_ENTRY_LOAD_CONFIG'", ")", ":", "return", "None", "timestamp", "=", "getattr", "(", "pefile_object", ".", "DIRECTORY_ENTRY_LOAD_CONFIG", ".", "struct", ",", "'TimeDateStamp'", ",", "0", ")", "return", "timestamp" ]
parse a string of strings . Don t quote strings
def parse_stuple ( s , length = 2 ) : if parse_utuple ( s , isrx_s , length = length ) is None : raise ValueError ( "{} is not a valid string tuple." . format ( s ) ) s = quote_subs ( s ) return evalt ( s )
7,963
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L140-L145
[ "def", "get_events", "(", "self", ")", ":", "header", "=", "BASE_HEADERS", ".", "copy", "(", ")", "header", "[", "'Cookie'", "]", "=", "self", ".", "__cookie", "request", "=", "requests", ".", "post", "(", "BASE_URL", "+", "'getEvents'", ",", "headers", "=", "header", ",", "timeout", "=", "10", ")", "if", "request", ".", "status_code", "!=", "200", ":", "self", ".", "__logged_in", "=", "False", "self", ".", "login", "(", ")", "self", ".", "get_events", "(", ")", "return", "try", ":", "result", "=", "request", ".", "json", "(", ")", "except", "ValueError", "as", "error", ":", "raise", "Exception", "(", "\"Not a valid result for getEvent,\"", "+", "\" protocol error: \"", "+", "error", ")", "return", "self", ".", "_get_events", "(", "result", ")" ]
helper for parsing a string that can be either a matplotlib color or be a tuple of colors . Returns a tuple of them either way .
def parse_colors ( s , length = 1 ) : if length and length > 1 : return parse_ctuple ( s , length = length ) if re . match ( '^ *{} *$' . format ( isrx_s ) , s ) : #it's just a string. return [ s ] elif re . match ( '^ *{} *$' . format ( rgbrx_s ) , s ) : return [ eval ( s ) ] else : return parse_ctuple ( s , length = length )
7,964
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L155-L168
[ "def", "emulate_rel", "(", "self", ",", "key_code", ",", "value", ",", "timeval", ")", ":", "return", "self", ".", "create_event_object", "(", "\"Relative\"", ",", "key_code", ",", "value", ",", "timeval", ")" ]
helper for parsing a string that can both rx or parsef which is obstensibly the parsef for rx .
def parse_qs ( s , rx , parsef = None , length = 2 , quote = False ) : if type ( rx ) != str : rx = rx . pattern if re . match ( " *\(.*\)" , s ) : if not parsef : if parse_utuple ( s , rx , length = length ) : if quote : s = quote_subs ( s ) return evalt ( s ) else : raise ValueError ( "{} did is not a valid tuple of {}" . format ( s , rx ) ) else : return parsef ( s , length = length ) elif re . match ( '^ *{} *$' . format ( rx ) , s ) : if quote : return eval ( '["{}"]' . format ( s ) ) return eval ( '[{}]' . format ( s ) ) else : raise ValueError ( "{} does not match '{}' or the passed parsef" . format ( s , rx ) )
7,965
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L170-L196
[ "def", "default_kms_key_name", "(", "self", ",", "value", ")", ":", "encryption_config", "=", "self", ".", "_properties", ".", "get", "(", "\"encryption\"", ",", "{", "}", ")", "encryption_config", "[", "\"defaultKmsKeyName\"", "]", "=", "value", "self", ".", "_patch_property", "(", "\"encryption\"", ",", "encryption_config", ")" ]
A hack to return a modified dict dynamically . Basically Does classless OOP as in js but with dicts although not really for the verb parts of OOP but more of the subject stuff .
def sd ( d , * * kw ) : #HURR SO COMPLICATED r = { } #copy. if you want to modify, r . update ( d ) #use {}.update r . update ( kw ) return r
7,966
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L199-L222
[ "def", "on_exception", "(", "self", ",", "exception", ")", ":", "logger", ".", "error", "(", "'Exception from stream!'", ",", "exc_info", "=", "True", ")", "self", ".", "streaming_exception", "=", "exception" ]
a helper for generating a function for reading keywords in interface functions with a dictionary with defaults
def mk_getkw ( kw , defaults , prefer_passed = False ) : def getkw ( * ls ) : r = [ kw [ l ] if test ( kw , l ) else defaults [ l ] for l in ls ] if len ( r ) == 1 : return r [ 0 ] return r def getkw_prefer_passed ( * ls ) : r = [ kw [ l ] if l in kw else defaults [ l ] for l in ls ] if len ( r ) == 1 : return r [ 0 ] return r return getkw if not prefer_passed else getkw_prefer_passed
7,967
https://github.com/noobermin/pys/blob/e01b74210c65eb96d019bb42e0a3c9e6676da943/pys/__init__.py#L250-L281
[ "def", "logEndTime", "(", ")", ":", "logger", ".", "info", "(", "'\\n'", "+", "'#'", "*", "70", ")", "logger", ".", "info", "(", "'Complete'", ")", "logger", ".", "info", "(", "datetime", ".", "today", "(", ")", ".", "strftime", "(", "\"%A, %d %B %Y %I:%M%p\"", ")", ")", "logger", ".", "info", "(", "'#'", "*", "70", "+", "'\\n'", ")" ]
Load resource data from server
def _load_resource ( self ) : url = self . _url if self . _params : url += '?{0}' . format ( six . moves . urllib_parse . urlencode ( self . _params ) ) r = getattr ( self . _session , self . _meta . get_method . lower ( ) ) ( url ) if r . status_code == 404 : raise NotFoundException ( 'Server returned 404 Not Found for the URL {0}' . format ( self . _url ) ) elif not 200 <= r . status_code < 400 : raise HTTPException ( 'Server returned {0} ({1})' . format ( r . status_code , r . reason ) , r ) data = self . _meta . deserializer . to_dict ( r . text ) self . populate_field_values ( data )
7,968
https://github.com/consbio/restle/blob/60d100da034c612d4910f4f79eaa57a76eb3dcc6/restle/resources.py#L69-L84
[ "def", "add_headers", "(", "width", "=", "80", ",", "title", "=", "'Untitled'", ",", "subtitle", "=", "''", ",", "author", "=", "''", ",", "email", "=", "''", ",", "description", "=", "''", ",", "tunings", "=", "[", "]", ")", ":", "result", "=", "[", "''", "]", "title", "=", "str", ".", "upper", "(", "title", ")", "result", "+=", "[", "str", ".", "center", "(", "' '", ".", "join", "(", "title", ")", ",", "width", ")", "]", "if", "subtitle", "!=", "''", ":", "result", "+=", "[", "''", ",", "str", ".", "center", "(", "str", ".", "title", "(", "subtitle", ")", ",", "width", ")", "]", "if", "author", "!=", "''", "or", "email", "!=", "''", ":", "result", "+=", "[", "''", ",", "''", "]", "if", "email", "!=", "''", ":", "result", "+=", "[", "str", ".", "center", "(", "'Written by: %s <%s>'", "%", "(", "author", ",", "email", ")", ",", "width", ")", "]", "else", ":", "result", "+=", "[", "str", ".", "center", "(", "'Written by: %s'", "%", "author", ",", "width", ")", "]", "if", "description", "!=", "''", ":", "result", "+=", "[", "''", ",", "''", "]", "words", "=", "description", ".", "split", "(", ")", "lines", "=", "[", "]", "line", "=", "[", "]", "last", "=", "0", "for", "word", "in", "words", ":", "if", "len", "(", "word", ")", "+", "last", "<", "width", "-", "10", ":", "line", ".", "append", "(", "word", ")", "last", "+=", "len", "(", "word", ")", "+", "1", "else", ":", "lines", ".", "append", "(", "line", ")", "line", "=", "[", "word", "]", "last", "=", "len", "(", "word", ")", "+", "1", "lines", ".", "append", "(", "line", ")", "for", "line", "in", "lines", ":", "result", "+=", "[", "str", ".", "center", "(", "' '", ".", "join", "(", "line", ")", ",", "width", ")", "]", "if", "tunings", "!=", "[", "]", ":", "result", "+=", "[", "''", ",", "''", ",", "str", ".", "center", "(", "'Instruments'", ",", "width", ")", "]", "for", "(", "i", ",", "tuning", ")", "in", "enumerate", "(", "tunings", ")", ":", "result", "+=", "[", "''", ",", "str", ".", "center", "(", "'%d. 
%s'", "%", "(", "i", "+", "1", ",", "tuning", ".", "instrument", ")", ",", "width", ")", ",", "str", ".", "center", "(", "tuning", ".", "description", ",", "width", ")", "]", "result", "+=", "[", "''", ",", "''", "]", "return", "result" ]
Load resource data and populate field values
def populate_field_values ( self , data ) : if not self . _meta . case_sensitive_fields : data = { k . lower ( ) : v for k , v in six . iteritems ( data ) } if self . _meta . match_fuzzy_keys : # String any non-alphanumeric chars from each key data = { '' . join ( x for x in k if x in ALPHANUMERIC ) . lower ( ) : v for k , v in six . iteritems ( data ) } for field in self . _meta . fields : name = field . name if self . _meta . case_sensitive_fields else field . name . lower ( ) value = None if self . _meta . match_fuzzy_keys : name = '' . join ( x for x in name if x in ALPHANUMERIC ) . lower ( ) if name in data : value = field . to_python ( data [ name ] , self ) elif field . required and field . default is None : message = "Response from {0} is missing required field '{1}'" . format ( self . _url , field . name ) if self . _strict : raise MissingFieldException ( message ) else : logger . warn ( message ) elif field . default is not None : value = copy . copy ( field . default ) setattr ( self , field . _attr_name , value ) self . _populated_field_values = True
7,969
https://github.com/consbio/restle/blob/60d100da034c612d4910f4f79eaa57a76eb3dcc6/restle/resources.py#L86-L116
[ "def", "_remove_clublog_xml_header", "(", "self", ",", "cty_xml_filename", ")", ":", "import", "tempfile", "try", ":", "with", "open", "(", "cty_xml_filename", ",", "\"r\"", ")", "as", "f", ":", "content", "=", "f", ".", "readlines", "(", ")", "cty_dir", "=", "tempfile", ".", "gettempdir", "(", ")", "cty_name", "=", "os", ".", "path", ".", "split", "(", "cty_xml_filename", ")", "[", "1", "]", "cty_xml_filename_no_header", "=", "os", ".", "path", ".", "join", "(", "cty_dir", ",", "\"NoHeader_\"", "+", "cty_name", ")", "with", "open", "(", "cty_xml_filename_no_header", ",", "\"w\"", ")", "as", "f", ":", "f", ".", "writelines", "(", "\"<clublog>\\n\\r\"", ")", "f", ".", "writelines", "(", "content", "[", "1", ":", "]", ")", "self", ".", "_logger", ".", "debug", "(", "\"Header successfully modified for XML Parsing\"", ")", "return", "cty_xml_filename_no_header", "except", "Exception", "as", "e", ":", "self", ".", "_logger", ".", "error", "(", "\"Clublog CTY could not be opened / modified\"", ")", "self", ".", "_logger", ".", "error", "(", "\"Error Message: \"", "+", "str", "(", "e", ")", ")", "return" ]
Clear all object descriptors for stopped task . Task must be joined prior to calling this method .
def close_thread ( self ) : if self . __thread is not None and self . __thread . is_alive ( ) is True : raise WThreadJoiningTimeoutError ( 'Thread is still alive. Thread name: %s' % self . __thread . name ) self . start_event ( ) . clear ( ) self . __thread = None
7,970
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/thread.py#L166-L174
[ "def", "calc_temp", "(", "Data_ref", ",", "Data", ")", ":", "T", "=", "300", "*", "(", "(", "Data", ".", "A", "*", "Data_ref", ".", "Gamma", ")", "/", "(", "Data_ref", ".", "A", "*", "Data", ".", "Gamma", ")", ")", "Data", ".", "T", "=", "T", "return", "T" ]
Trace my_module exceptions .
def trace_module ( no_print = True ) : pwd = os . path . dirname ( __file__ ) script_name = os . path . join ( pwd , "test_my_module.py" ) with pexdoc . ExDocCxt ( ) as exdoc_obj : if pytest . main ( [ "-s" , "-vv" , "-x" , "{0}" . format ( script_name ) ] ) : raise RuntimeError ( "Tracing did not complete successfully" ) if not no_print : module_prefix = "docs.support.my_module." callable_names = [ "func" , "MyClass.value" ] for callable_name in callable_names : callable_name = module_prefix + callable_name print ( "\nCallable: {0}" . format ( callable_name ) ) print ( exdoc_obj . get_sphinx_doc ( callable_name , width = 70 ) ) print ( "\n" ) return copy . copy ( exdoc_obj )
7,971
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/docs/support/trace_my_module_1.py#L14-L29
[ "def", "apply_binding", "(", "self", ",", "binding", ",", "msg_str", ",", "destination", "=", "\"\"", ",", "relay_state", "=", "\"\"", ",", "response", "=", "False", ",", "sign", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# unless if BINDING_HTTP_ARTIFACT", "if", "response", ":", "typ", "=", "\"SAMLResponse\"", "else", ":", "typ", "=", "\"SAMLRequest\"", "if", "binding", "==", "BINDING_HTTP_POST", ":", "logger", ".", "info", "(", "\"HTTP POST\"", ")", "# if self.entity_type == 'sp':", "# info = self.use_http_post(msg_str, destination, relay_state,", "# typ)", "# info[\"url\"] = destination", "# info[\"method\"] = \"POST\"", "# else:", "info", "=", "self", ".", "use_http_form_post", "(", "msg_str", ",", "destination", ",", "relay_state", ",", "typ", ")", "info", "[", "\"url\"", "]", "=", "destination", "info", "[", "\"method\"", "]", "=", "\"POST\"", "elif", "binding", "==", "BINDING_HTTP_REDIRECT", ":", "logger", ".", "info", "(", "\"HTTP REDIRECT\"", ")", "sigalg", "=", "kwargs", ".", "get", "(", "\"sigalg\"", ")", "if", "sign", "and", "sigalg", ":", "signer", "=", "self", ".", "sec", ".", "sec_backend", ".", "get_signer", "(", "sigalg", ")", "else", ":", "signer", "=", "None", "info", "=", "self", ".", "use_http_get", "(", "msg_str", ",", "destination", ",", "relay_state", ",", "typ", ",", "signer", "=", "signer", ",", "*", "*", "kwargs", ")", "info", "[", "\"url\"", "]", "=", "str", "(", "destination", ")", "info", "[", "\"method\"", "]", "=", "\"GET\"", "elif", "binding", "==", "BINDING_SOAP", "or", "binding", "==", "BINDING_PAOS", ":", "info", "=", "self", ".", "use_soap", "(", "msg_str", ",", "destination", ",", "sign", "=", "sign", ",", "*", "*", "kwargs", ")", "elif", "binding", "==", "BINDING_URI", ":", "info", "=", "self", ".", "use_http_uri", "(", "msg_str", ",", "typ", ",", "destination", ")", "elif", "binding", "==", "BINDING_HTTP_ARTIFACT", ":", "if", "response", ":", "info", "=", "self", ".", "use_http_artifact", "(", "msg_str", ",", 
"destination", ",", "relay_state", ")", "info", "[", "\"method\"", "]", "=", "\"GET\"", "info", "[", "\"status\"", "]", "=", "302", "else", ":", "info", "=", "self", ".", "use_http_artifact", "(", "msg_str", ",", "destination", ",", "relay_state", ")", "else", ":", "raise", "SAMLError", "(", "\"Unknown binding type: %s\"", "%", "binding", ")", "return", "info" ]
Check header name for validity . Return True if name is valid
def header_name_check ( header_name ) : header_match = WHTTPHeaders . header_name_re . match ( header_name . encode ( 'us-ascii' ) ) return len ( header_name ) > 0 and header_match is not None
7,972
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/headers.py#L53-L60
[ "def", "write_result_stream", "(", "result_stream", ",", "filename_prefix", "=", "None", ",", "results_per_file", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "result_stream", ",", "types", ".", "GeneratorType", ")", ":", "stream", "=", "result_stream", "else", ":", "stream", "=", "result_stream", ".", "stream", "(", ")", "file_time_formatter", "=", "\"%Y-%m-%dT%H_%M_%S\"", "if", "filename_prefix", "is", "None", ":", "filename_prefix", "=", "\"twitter_search_results\"", "if", "results_per_file", ":", "logger", ".", "info", "(", "\"chunking result stream to files with {} tweets per file\"", ".", "format", "(", "results_per_file", ")", ")", "chunked_stream", "=", "partition", "(", "stream", ",", "results_per_file", ",", "pad_none", "=", "True", ")", "for", "chunk", "in", "chunked_stream", ":", "chunk", "=", "filter", "(", "lambda", "x", ":", "x", "is", "not", "None", ",", "chunk", ")", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}_{}.json\"", ".", "format", "(", "filename_prefix", ",", "curr_datetime", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "chunk", ")", "else", ":", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}.json\"", ".", "format", "(", "filename_prefix", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "stream", ")" ]
Remove header by its name
def remove_headers ( self , header_name ) : if self . __ro_flag : raise RuntimeError ( 'ro' ) header_name = self . normalize_name ( header_name ) if header_name in self . __headers . keys ( ) : self . __headers . pop ( header_name )
7,973
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/headers.py#L83-L93
[ "def", "initialize_schema", "(", "connection", ")", ":", "cursor", "=", "connection", ".", "cursor", "(", ")", "cursor", ".", "execute", "(", "\"PRAGMA application_id={}\"", ".", "format", "(", "_TENSORBOARD_APPLICATION_ID", ")", ")", "cursor", ".", "execute", "(", "\"PRAGMA user_version={}\"", ".", "format", "(", "_TENSORBOARD_USER_VERSION", ")", ")", "with", "connection", ":", "for", "statement", "in", "_SCHEMA_STATEMENTS", ":", "lines", "=", "statement", ".", "strip", "(", "'\\n'", ")", ".", "split", "(", "'\\n'", ")", "message", "=", "lines", "[", "0", "]", "+", "(", "'...'", "if", "len", "(", "lines", ")", ">", "1", "else", "''", ")", "logger", ".", "debug", "(", "'Running DB init statement: %s'", ",", "message", ")", "cursor", ".", "execute", "(", "statement", ")" ]
Add new header
def add_headers ( self , header_name , value , * values ) : if self . __ro_flag : raise RuntimeError ( 'ro' ) header_name = self . normalize_name ( header_name ) if header_name not in self . __headers . keys ( ) : self . __headers [ header_name ] = [ value ] else : self . __headers [ header_name ] . append ( value ) for single_value in values : self . __headers [ header_name ] . append ( single_value )
7,974
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/headers.py#L98-L116
[ "def", "_timestamp_regulator", "(", "self", ")", ":", "unified_timestamps", "=", "_PrettyDefaultDict", "(", "list", ")", "staged_files", "=", "self", ".", "_list_audio_files", "(", "sub_dir", "=", "\"staging\"", ")", "for", "timestamp_basename", "in", "self", ".", "__timestamps_unregulated", ":", "if", "len", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ">", "1", ":", "# File has been splitted", "timestamp_name", "=", "''", ".", "join", "(", "timestamp_basename", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "staged_splitted_files_of_timestamp", "=", "list", "(", "filter", "(", "lambda", "staged_file", ":", "(", "timestamp_name", "==", "staged_file", "[", ":", "-", "3", "]", "and", "all", "(", "[", "(", "x", "in", "set", "(", "map", "(", "str", ",", "range", "(", "10", ")", ")", ")", ")", "for", "x", "in", "staged_file", "[", "-", "3", ":", "]", "]", ")", ")", ",", "staged_files", ")", ")", "if", "len", "(", "staged_splitted_files_of_timestamp", ")", "==", "0", ":", "self", ".", "__errors", "[", "(", "time", "(", ")", ",", "timestamp_basename", ")", "]", "=", "{", "\"reason\"", ":", "\"Missing staged file\"", ",", "\"current_staged_files\"", ":", "staged_files", "}", "continue", "staged_splitted_files_of_timestamp", ".", "sort", "(", ")", "unified_timestamp", "=", "list", "(", ")", "for", "staging_digits", ",", "splitted_file", "in", "enumerate", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ":", "prev_splits_sec", "=", "0", "if", "int", "(", "staging_digits", ")", "!=", "0", ":", "prev_splits_sec", "=", "self", ".", "_get_audio_duration_seconds", "(", "\"{}/staging/{}{:03d}\"", ".", "format", "(", "self", ".", "src_dir", ",", "timestamp_name", ",", "staging_digits", "-", "1", ")", ")", "for", "word_block", "in", "splitted_file", ":", "unified_timestamp", ".", "append", "(", "_WordBlock", "(", "word", "=", "word_block", ".", "word", ",", "start", "=", "round", "(", 
"word_block", ".", "start", "+", "prev_splits_sec", ",", "2", ")", ",", "end", "=", "round", "(", "word_block", ".", "end", "+", "prev_splits_sec", ",", "2", ")", ")", ")", "unified_timestamps", "[", "str", "(", "timestamp_basename", ")", "]", "+=", "unified_timestamp", "else", ":", "unified_timestamps", "[", "timestamp_basename", "]", "+=", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", "[", "0", "]", "self", ".", "__timestamps", ".", "update", "(", "unified_timestamps", ")", "self", ".", "__timestamps_unregulated", "=", "_PrettyDefaultDict", "(", "list", ")" ]
Return header value by its name
def get_headers ( self , header_name ) : header_name = self . normalize_name ( header_name ) if header_name in self . __headers . keys ( ) : return tuple ( self . __headers [ header_name ] )
7,975
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/headers.py#L137-L145
[ "def", "_set_cinema_params", "(", "self", ",", "cinema_mode", ",", "fps", ")", ":", "if", "re", ".", "match", "(", "\"1.5|2.0\"", ",", "version", ".", "openjpeg_version", ")", "is", "not", "None", ":", "msg", "=", "(", "\"Writing Cinema2K or Cinema4K files is not supported with \"", "\"OpenJPEG library versions less than 2.1.0. The installed \"", "\"version of OpenJPEG is {version}.\"", ")", "msg", "=", "msg", ".", "format", "(", "version", "=", "version", ".", "openjpeg_version", ")", "raise", "IOError", "(", "msg", ")", "# Cinema modes imply MCT.", "self", ".", "_cparams", ".", "tcp_mct", "=", "1", "if", "cinema_mode", "==", "'cinema2k'", ":", "if", "fps", "not", "in", "[", "24", ",", "48", "]", ":", "msg", "=", "'Cinema2K frame rate must be either 24 or 48.'", "raise", "IOError", "(", "msg", ")", "if", "fps", "==", "24", ":", "self", ".", "_cparams", ".", "rsiz", "=", "core", ".", "OPJ_PROFILE_CINEMA_2K", "self", ".", "_cparams", ".", "max_comp_size", "=", "core", ".", "OPJ_CINEMA_24_COMP", "self", ".", "_cparams", ".", "max_cs_size", "=", "core", ".", "OPJ_CINEMA_24_CS", "else", ":", "self", ".", "_cparams", ".", "rsiz", "=", "core", ".", "OPJ_PROFILE_CINEMA_2K", "self", ".", "_cparams", ".", "max_comp_size", "=", "core", ".", "OPJ_CINEMA_48_COMP", "self", ".", "_cparams", ".", "max_cs_size", "=", "core", ".", "OPJ_CINEMA_48_CS", "else", ":", "# cinema4k", "self", ".", "_cparams", ".", "rsiz", "=", "core", ".", "OPJ_PROFILE_CINEMA_4K" ]
Return object copy with header names saved as it is described in the given protocol version
def switch_name_style ( self , http_protocol_version ) : new_headers = WHTTPHeaders ( ) new_headers . __normalization_mode = http_protocol_version names = self . headers ( ) for name in names : new_headers . add_headers ( name , * self . get_headers ( name ) ) for cookie_name in self . __set_cookies . cookies ( ) : new_headers . __set_cookies . add_cookie ( self . __set_cookies [ cookie_name ] . copy ( ) ) return new_headers
7,976
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/headers.py#L178-L193
[ "def", "setOverlayTransformAbsolute", "(", "self", ",", "ulOverlayHandle", ",", "eTrackingOrigin", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformAbsolute", "pmatTrackingOriginToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "eTrackingOrigin", ",", "byref", "(", "pmatTrackingOriginToOverlayTransform", ")", ")", "return", "result", ",", "pmatTrackingOriginToOverlayTransform" ]
Return read - only copy of this object
def ro ( self ) : ro_headers = WHTTPHeaders ( ) names = self . headers ( ) for name in names : ro_headers . add_headers ( name , * self . get_headers ( name ) ) ro_headers . __cookies = self . __set_cookies . ro ( ) ro_headers . __ro_flag = True return ro_headers
7,977
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/headers.py#L195-L206
[ "def", "run_tensorboard", "(", "logdir", ",", "listen_on", "=", "\"0.0.0.0\"", ",", "port", "=", "0", ",", "tensorboard_args", "=", "None", ",", "timeout", "=", "10", ")", ":", "if", "tensorboard_args", "is", "None", ":", "tensorboard_args", "=", "[", "]", "tensorboard_instance", "=", "Process", ".", "create_process", "(", "TENSORBOARD_BINARY", ".", "split", "(", "\" \"", ")", "+", "[", "\"--logdir\"", ",", "logdir", ",", "\"--host\"", ",", "listen_on", ",", "\"--port\"", ",", "str", "(", "port", ")", "]", "+", "tensorboard_args", ")", "try", ":", "tensorboard_instance", ".", "run", "(", ")", "except", "FileNotFoundError", "as", "ex", ":", "raise", "TensorboardNotFoundError", "(", "ex", ")", "# Wait for a message that signaliezes start of Tensorboard", "start", "=", "time", ".", "time", "(", ")", "data", "=", "\"\"", "while", "time", ".", "time", "(", ")", "-", "start", "<", "timeout", ":", "line", "=", "tensorboard_instance", ".", "read_line_stderr", "(", "time_limit", "=", "timeout", ")", "data", "+=", "line", "if", "\"at http://\"", "in", "line", ":", "port", "=", "parse_port_from_tensorboard_output", "(", "line", ")", "# Good case", "return", "port", "elif", "\"TensorBoard attempted to bind to port\"", "in", "line", ":", "break", "tensorboard_instance", ".", "terminate", "(", ")", "raise", "UnexpectedOutputError", "(", "data", ",", "expected", "=", "\"Confirmation that Tensorboard has started\"", ")" ]
Return internal cookie jar that must be used as HTTP - request cookies
def client_cookie_jar ( self ) : cookie_jar = WHTTPCookieJar ( ) cookie_header = self . get_headers ( 'Cookie' ) for cookie_string in ( cookie_header if cookie_header is not None else tuple ( ) ) : for single_cookie in WHTTPCookieJar . import_header_text ( cookie_string ) : cookie_jar . add_cookie ( single_cookie ) return cookie_jar . ro ( )
7,978
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/headers.py#L230-L243
[ "def", "schema_complete", "(", ")", ":", "return", "Schema", "(", "{", "'stage'", ":", "And", "(", "str", ",", "len", ")", ",", "'timestamp'", ":", "int", ",", "'status'", ":", "And", "(", "str", ",", "lambda", "s", ":", "s", "in", "[", "'started'", ",", "'succeeded'", ",", "'failed'", "]", ")", ",", "# optional matrix", "Optional", "(", "'matrix'", ",", "default", "=", "'default'", ")", ":", "And", "(", "str", ",", "len", ")", ",", "# optional information", "Optional", "(", "'information'", ",", "default", "=", "{", "}", ")", ":", "{", "Optional", "(", "Regex", "(", "r'([a-z][_a-z]*)'", ")", ")", ":", "object", "}", "}", ")" ]
Create WHTTPHeaders by the given code . If code has Set - Cookie headers that headers are parsed data are stored in internal cookie jar . At the end of parsing Set - Cookie headers are removed from the result
def import_headers ( cls , http_code ) : headers = WHTTPHeaders ( ) message = email . message_from_file ( StringIO ( http_code ) ) for header_name , header_value in message . items ( ) : headers . add_headers ( header_name , header_value ) cookie_header = headers . get_headers ( 'Set-Cookie' ) if cookie_header is not None : for cookie_string in cookie_header : for single_cookie in WHTTPCookieJar . import_header_text ( cookie_string ) : headers . set_cookie_jar ( ) . add_cookie ( single_cookie ) headers . remove_headers ( 'Set-Cookie' ) return headers
7,979
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/headers.py#L247-L267
[ "def", "moment_inertia", "(", "self", ")", ":", "tensor", "=", "inertia", ".", "sphere_inertia", "(", "mass", "=", "self", ".", "volume", ",", "radius", "=", "self", ".", "primitive", ".", "radius", ")", "return", "tensor" ]
Trace my_module_original exceptions .
def trace_module ( no_print = True ) : with pexdoc . ExDocCxt ( ) as exdoc_obj : try : docs . support . my_module . func ( "John" ) obj = docs . support . my_module . MyClass ( ) obj . value = 5 obj . value except : raise RuntimeError ( "Tracing did not complete successfully" ) if not no_print : module_prefix = "docs.support.my_module." callable_names = [ "func" , "MyClass.value" ] for callable_name in callable_names : callable_name = module_prefix + callable_name print ( "\nCallable: {0}" . format ( callable_name ) ) print ( exdoc_obj . get_sphinx_doc ( callable_name , width = 70 ) ) print ( "\n" ) return copy . copy ( exdoc_obj )
7,980
https://github.com/pmacosta/pexdoc/blob/201ac243e5781347feb75896a4231429fe6da4b1/docs/support/trace_my_module_2.py#L14-L32
[ "def", "vn_release", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "call", "!=", "'function'", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_reserve function must be called with -f or --function.'", ")", "if", "kwargs", "is", "None", ":", "kwargs", "=", "{", "}", "vn_id", "=", "kwargs", ".", "get", "(", "'vn_id'", ",", "None", ")", "vn_name", "=", "kwargs", ".", "get", "(", "'vn_name'", ",", "None", ")", "path", "=", "kwargs", ".", "get", "(", "'path'", ",", "None", ")", "data", "=", "kwargs", ".", "get", "(", "'data'", ",", "None", ")", "if", "vn_id", ":", "if", "vn_name", ":", "log", ".", "warning", "(", "'Both the \\'vn_id\\' and \\'vn_name\\' arguments were provided. '", "'\\'vn_id\\' will take precedence.'", ")", "elif", "vn_name", ":", "vn_id", "=", "get_vn_id", "(", "kwargs", "=", "{", "'name'", ":", "vn_name", "}", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_release function requires a \\'vn_id\\' or a \\'vn_name\\' to '", "'be provided.'", ")", "if", "data", ":", "if", "path", ":", "log", ".", "warning", "(", "'Both the \\'data\\' and \\'path\\' arguments were provided. 
'", "'\\'data\\' will take precedence.'", ")", "elif", "path", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "path", ",", "mode", "=", "'r'", ")", "as", "rfh", ":", "data", "=", "rfh", ".", "read", "(", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_release function requires either \\'data\\' or a \\'path\\' to '", "'be provided.'", ")", "server", ",", "user", ",", "password", "=", "_get_xml_rpc", "(", ")", "auth", "=", "':'", ".", "join", "(", "[", "user", ",", "password", "]", ")", "response", "=", "server", ".", "one", ".", "vn", ".", "release", "(", "auth", ",", "int", "(", "vn_id", ")", ",", "data", ")", "ret", "=", "{", "'action'", ":", "'vn.release'", ",", "'released'", ":", "response", "[", "0", "]", ",", "'resource_id'", ":", "response", "[", "1", "]", ",", "'error_code'", ":", "response", "[", "2", "]", ",", "}", "return", "ret" ]
Create read - only copy
def ro ( self ) : request = WWebRequest ( self . session ( ) , self . method ( ) , self . path ( ) , headers = self . headers ( ) . ro ( ) , request_data = self . request_data ( ) ) request . __ro_flag = True return request
7,981
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/web/request.py#L154-L164
[ "def", "load_toml_rest_api_config", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "LOGGER", ".", "info", "(", "\"Skipping rest api loading from non-existent config file: %s\"", ",", "filename", ")", "return", "RestApiConfig", "(", ")", "LOGGER", ".", "info", "(", "\"Loading rest api information from config: %s\"", ",", "filename", ")", "try", ":", "with", "open", "(", "filename", ")", "as", "fd", ":", "raw_config", "=", "fd", ".", "read", "(", ")", "except", "IOError", "as", "e", ":", "raise", "RestApiConfigurationError", "(", "\"Unable to load rest api configuration file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "toml_config", "=", "toml", ".", "loads", "(", "raw_config", ")", "invalid_keys", "=", "set", "(", "toml_config", ".", "keys", "(", ")", ")", ".", "difference", "(", "[", "'bind'", ",", "'connect'", ",", "'timeout'", ",", "'opentsdb_db'", ",", "'opentsdb_url'", ",", "'opentsdb_username'", ",", "'opentsdb_password'", ",", "'client_max_size'", "]", ")", "if", "invalid_keys", ":", "raise", "RestApiConfigurationError", "(", "\"Invalid keys in rest api config: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted", "(", "list", "(", "invalid_keys", ")", ")", ")", ")", ")", "config", "=", "RestApiConfig", "(", "bind", "=", "toml_config", ".", "get", "(", "\"bind\"", ",", "None", ")", ",", "connect", "=", "toml_config", ".", "get", "(", "'connect'", ",", "None", ")", ",", "timeout", "=", "toml_config", ".", "get", "(", "'timeout'", ",", "None", ")", ",", "opentsdb_url", "=", "toml_config", ".", "get", "(", "'opentsdb_url'", ",", "None", ")", ",", "opentsdb_db", "=", "toml_config", ".", "get", "(", "'opentsdb_db'", ",", "None", ")", ",", "opentsdb_username", "=", "toml_config", ".", "get", "(", "'opentsdb_username'", ",", "None", ")", ",", "opentsdb_password", "=", "toml_config", ".", "get", "(", "'opentsdb_password'", ",", "None", ")", ",", "client_max_size", "=", "toml_config", 
".", "get", "(", "'client_max_size'", ",", "None", ")", ")", "return", "config" ]
Defines simple contact form that can be used to contact a site member passed by username in the URL or to all superusers or to a list defined in settings . DEFAULT_CONTACTS .
def simple_contact ( request , username = "" ) : site = Site . objects . get_current ( ) form = ContactForm ( request . POST or None ) UserModel = get_user_model ( ) recipients = [ ] site_form = False logger . debug ( 'Recipients should be empty: %s' % recipients ) # if we know, fill in the user name and email if request . user . is_authenticated : # first, resolve username for tango and non-tango sites try : name = request . user . display_name except AttributeError : name = request . user . username form . fields [ 'sender_name' ] . widget . attrs [ 'readonly' ] = 'true' form . fields [ 'sender_name' ] . initial = name form . fields [ 'sender_email' ] . widget . attrs [ 'readonly' ] = 'true' form . fields [ 'sender_email' ] . initial = request . user . email if username : member = get_object_or_404 ( UserModel , username = username ) recipients = [ member . email , ] logger . debug ( 'Recipients should be a single user: %s' % recipients ) else : # site contact form. # Use first of settings.DEFAULT_CONTACTS or all superusers site_form = True member = None recipients = getattr ( settings , "DEFAULT_CONTACTS" , None ) logger . debug ( 'Recipients should be match DEFAULT_CONTACTS: %s' % recipients ) if not recipients : recipients = UserModel . objects . filter ( is_superuser = True ) . values_list ( 'email' , flat = True ) warnings . warn ( "settings.DEFAULT_CONTACTS does not exist. You may want to create it." , RuntimeWarning ) logger . debug ( 'Recipients should be superusers: %s' % recipients ) if form . is_valid ( ) : if site_form : subject = "A {} contact form submission from {}" . format ( site . name , form . cleaned_data [ 'sender_name' ] ) else : subject = "A message from {} on {}" . format ( form . cleaned_data [ 'sender_name' ] , site . name ) body = form . cleaned_data [ 'body' ] sender_email = form . cleaned_data [ 'sender_email' ] if 'send_a_copy' in request . POST : recipients . append ( sender_email ) logger . 
debug ( 'Recipients should be match prior + sender email: %s' % recipients ) mail = EmailMessage ( subject = subject , body = body , from_email = sender_email , to = recipients ) mail . send ( ) return HttpResponseRedirect ( success_url ) return render ( request , 'contact/simple_form.html' , { 'form' : form , 'site' : site , 'member' : member } )
7,982
https://github.com/tBaxter/tango-contact-manager/blob/7bd5be326a8db8f438cdefff0fbd14849d0474a5/build/lib/contact_manager/views.py#L61-L135
[ "def", "border", "(", "self", ")", ":", "border_array", "=", "self", ".", "bitmap", "-", "self", ".", "inner", ".", "bitmap", "return", "Region", "(", "border_array", ")" ]
Builds appropriate contact form based on options set in the contact_form controller .
def build_contact ( request , slug = "" ) : controller = get_object_or_404 ( ContactFormController , slug = slug ) site = Site . objects . get_current ( ) UserModel = get_user_model ( ) user = request . user form = ContactForm ( request . POST or None , request . FILES or None , controller = controller ) # if we know, fill in the user name and email if user . is_authenticated : # first, resolve username for tango and non-tango sites try : name = user . display_name except AttributeError : name = user . username form . fields [ 'sender_name' ] . widget . attrs [ 'readonly' ] = 'true' form . fields [ 'sender_name' ] . initial = name form . fields [ 'sender_email' ] . widget . attrs [ 'readonly' ] = 'true' form . fields [ 'sender_email' ] . initial = user . email if form . is_valid ( ) : if controller . store_in_db : # To do: sanitize submission. new_msg = Contact ( * * form . cleaned_data ) new_msg . controller = controller new_msg . site = site if controller . override_subject : # we're overriding the subject new_msg . subject = controller . override_subject new_msg . save ( ) if controller . send_emails : form_data = form . cleaned_data if controller . override_subject : subject = controller . override_subject elif 'subject' in form_data : subject = form_data [ 'subject' ] else : subject = "{} message from {}" . format ( controller . name , form_data [ 'sender_name' ] ) body = "{} \n\n {}" . format ( form_data [ 'body' ] , form_data [ 'sender_name' ] ) if controller . request_contact_info : body += "\nAddress: {} \nCity: {} \nState: {} \nPhone: {}" . format ( form_data [ 'contact_address' ] , form_data [ 'contact_city' ] , form_data [ 'contact_state' ] , form_data [ 'contact_phone' ] ) if controller . email_options == '2' : # Create selectable list from recipients try : to = [ UserModel . objects . get ( username = form . cleaned_data [ 'to' ] ) . email ] except Exception : to = [ form . cleaned_data [ 'to' ] ] if controller . email_options == '1' : to = [ r . 
email for r in controller . recipients . all ( ) ] for r in controller . other_recipients . all ( ) : to . append ( r . email ) if 'send_a_copy' in form . cleaned_data : to . append ( form . cleaned_data [ 'sender_email' ] ) mail = EmailMessage ( subject = subject , body = body , from_email = form . cleaned_data [ 'sender_email' ] , to = to ) if 'photo' in request . FILES : photo = request . FILES [ 'photo' ] mail . attach ( photo . name , photo . read ( ) , photo . content_type ) mail . send ( ) return render ( request , 'success_url' , { 'controller' : controller } ) return render ( request , 'contact/form.html' , { 'form' : form , 'site' : site , 'controller' : controller } )
7,983
https://github.com/tBaxter/tango-contact-manager/blob/7bd5be326a8db8f438cdefff0fbd14849d0474a5/build/lib/contact_manager/views.py#L138-L219
[ "def", "get_titles", "(", ")", ":", "if", "os", ".", "name", "==", "'posix'", ":", "for", "proc", "in", "get_processes", "(", ")", ":", "cmd", "=", "[", "'xdotool'", ",", "'search'", ",", "'--name'", ",", "proc", "]", "proc", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "window_ids", "=", "proc", ".", "communicate", "(", ")", "[", "0", "]", ".", "decode", "(", "'utf-8'", ")", "if", "window_ids", ":", "for", "window_id", "in", "window_ids", ".", "split", "(", "'\\n'", ")", ":", "cmd", "=", "[", "'xdotool'", ",", "'getwindowname'", ",", "window_id", "]", "proc", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "title", "=", "proc", ".", "communicate", "(", ")", "[", "0", "]", ".", "decode", "(", "'utf-8'", ")", "try", ":", "if", "title", "[", "-", "1", "]", "==", "'\\n'", ":", "title", "=", "title", "[", ":", "-", "1", "]", "yield", "title", "except", "IndexError", ":", "pass", "else", ":", "raise", "NotImplementedError" ]
Generate a webapp2 . RequestHandler class for the pale endpoint .
def pale_webapp2_request_handler_generator ( pale_endpoint ) : def pale_handler ( self , * args , * * kwargs ) : if self . request . method == "OPTIONS" : origin = self . request . headers . get ( "Origin" , None ) self . response . headers [ 'Access-Control-Allow-Origin' ] = origin self . response . headers [ 'Access-Control-Allow-Headers' ] = 'Origin, X-Requested-With, Content-Type, Accept' self . response . headers [ 'Access-Control-Allow-Methods' ] = 'POST, GET, PUT, DELETE' self . response . headers [ 'Access-Control-Allow-Credentials' ] = 'true' return self . response try : return pale_endpoint . _execute ( self . request ) finally : pale_endpoint . _finally ( ) cls = type ( pale_endpoint . _route_name , ( webapp2 . RequestHandler , ) , dict ( pale_handler = pale_handler ) ) return cls
7,984
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/adapters/webapp2.py#L20-L44
[ "def", "get_inactive", "(", ")", "->", "List", "[", "str", "]", ":", "inactive", "=", "[", "]", "for", "i", "in", "range", "(", "__num_subarrays__", ")", ":", "key", "=", "Subarray", ".", "get_key", "(", "i", ")", "if", "DB", ".", "get_hash_value", "(", "key", ",", "'active'", ")", ".", "upper", "(", ")", "==", "'FALSE'", ":", "inactive", ".", "append", "(", "Subarray", ".", "get_id", "(", "i", ")", ")", "return", "inactive" ]
Binds a Pale API implementation to a webapp2 WSGIApplication
def bind_pale_to_webapp2 ( pale_app_module , webapp_wsgiapplication , route_prefix = None ) : if not isinstance ( webapp_wsgiapplication , webapp2 . WSGIApplication ) : raise TypeError ( "pale.adapters.webapp2.bind_pale_to_webapp2 expected " "the passed in webapp_wsgiapplication to be an instance of " "WSGIApplication, but it was an instance of %s instead." % ( type ( webapp_wsgiapplication ) , ) ) if not pale . is_pale_module ( pale_app_module ) : raise TypeError ( "pale.adapters.webapp2.bind_pale_to_webapp2 expected " "the passed in pale_app_module to be a Python module with a " "`_module_type` value equal to `pale.ImplementationModule`, " "but it found an instance of %s instead." % ( type ( pale_app_module ) , ) ) endpoints = pale . extract_endpoints ( pale_app_module ) for endpoint in endpoints : endpoint . _set_response_class ( RESPONSE_CLASS ) method = endpoint . _http_method name = endpoint . _route_name req_handler = pale_webapp2_request_handler_generator ( endpoint ) route_uri = endpoint . _uri if route_prefix is not None : route_uri = "%s%s" % ( route_prefix , route_uri ) route = webapp2 . Route ( route_uri , handler = req_handler , name = name , handler_method = 'pale_handler' , methods = [ method , "OPTIONS" ] ) webapp_wsgiapplication . router . add ( route )
7,985
https://github.com/Loudr/pale/blob/dc002ee6032c856551143af222ff8f71ed9853fe/pale/adapters/webapp2.py#L47-L84
[ "def", "_check_if_all_updated", "(", "self", ")", ":", "for", "g", "in", "self", ".", "toc", ".", "toc", ":", "if", "g", "not", "in", "self", ".", "values", ":", "return", "False", "for", "n", "in", "self", ".", "toc", ".", "toc", "[", "g", "]", ":", "if", "n", "not", "in", "self", ".", "values", "[", "g", "]", ":", "return", "False", "return", "True" ]
Methods appends modification_code to the specified envelope .
def encode ( self , envelope , session , target = None , modification_code = None , * * kwargs ) : self . __args_check ( envelope , target , modification_code ) if isinstance ( envelope , WMessengerTextEnvelope ) : target_envelope_cls = WMessengerTextEnvelope else : # isinstance(envelope, WMessengerBytesEnvelope) target_envelope_cls = WMessengerBytesEnvelope if target == WMessengerFixedModificationLayer . Target . head : return target_envelope_cls ( modification_code + envelope . message ( ) , meta = envelope ) else : # target == WMessengerFixedModificationLayer.Target.tail return target_envelope_cls ( envelope . message ( ) + modification_code , meta = envelope )
7,986
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/messenger/coders.py#L103-L124
[ "def", "empirical_sinkhorn", "(", "X_s", ",", "X_t", ",", "reg", ",", "a", "=", "None", ",", "b", "=", "None", ",", "metric", "=", "'sqeuclidean'", ",", "numIterMax", "=", "10000", ",", "stopThr", "=", "1e-9", ",", "verbose", "=", "False", ",", "log", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "a", "is", "None", ":", "a", "=", "unif", "(", "np", ".", "shape", "(", "X_s", ")", "[", "0", "]", ")", "if", "b", "is", "None", ":", "b", "=", "unif", "(", "np", ".", "shape", "(", "X_t", ")", "[", "0", "]", ")", "M", "=", "dist", "(", "X_s", ",", "X_t", ",", "metric", "=", "metric", ")", "if", "log", ":", "pi", ",", "log", "=", "sinkhorn", "(", "a", ",", "b", ",", "M", ",", "reg", ",", "numItermax", "=", "numIterMax", ",", "stopThr", "=", "stopThr", ",", "verbose", "=", "verbose", ",", "log", "=", "True", ",", "*", "*", "kwargs", ")", "return", "pi", ",", "log", "else", ":", "pi", "=", "sinkhorn", "(", "a", ",", "b", ",", "M", ",", "reg", ",", "numItermax", "=", "numIterMax", ",", "stopThr", "=", "stopThr", ",", "verbose", "=", "verbose", ",", "log", "=", "False", ",", "*", "*", "kwargs", ")", "return", "pi" ]
Methods checks envelope for modification_code existence and removes it .
def decode ( self , envelope , session , target = None , modification_code = None , * * kwargs ) : self . __args_check ( envelope , target , modification_code ) message = envelope . message ( ) if len ( message ) < len ( modification_code ) : raise ValueError ( 'Invalid message length' ) if isinstance ( envelope , WMessengerTextEnvelope ) : target_envelope_cls = WMessengerTextEnvelope else : # isinstance(envelope, WMessengerBytesEnvelope) target_envelope_cls = WMessengerBytesEnvelope if target == WMessengerFixedModificationLayer . Target . head : if message [ : len ( modification_code ) ] != modification_code : raise ValueError ( 'Invalid header in message' ) return target_envelope_cls ( message [ len ( modification_code ) : ] , meta = envelope ) else : # target == WMessengerFixedModificationLayer.Target.tail if message [ - len ( modification_code ) : ] != modification_code : raise ValueError ( 'Invalid tail in message' ) return target_envelope_cls ( message [ : - len ( modification_code ) ] , meta = envelope )
7,987
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/network/messenger/coders.py#L128-L157
[ "def", "set_baudrate", "(", "self", ",", "channel", ",", "BTR", ",", "baudarate", ")", ":", "UcanSetBaudrateEx", "(", "self", ".", "_handle", ",", "channel", ",", "BTR", ">>", "8", ",", "BTR", ",", "baudarate", ")" ]
Return tasks that was started . Result way be filtered by the given arguments .
def started_tasks ( self , task_registry_id = None , task_cls = None ) : if task_registry_id is not None : task = None for registered_task in self . __started : if registered_task . __registry_tag__ == task_registry_id : task = registered_task if task_cls is not None and task is not None : if isinstance ( task , task_cls ) is True : return task return None return task result = filter ( lambda x : x is not None , self . __started ) if task_cls is not None : result = filter ( lambda x : isinstance ( x , task_cls ) , result ) return tuple ( result )
7,988
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/dependency.py#L153-L178
[ "def", "patch_requests", "(", ")", ":", "config", ".", "create_config_directory", "(", ")", "ca_certs_file", "=", "config", ".", "CERT_FILE", "ca_certs_contents", "=", "requests", ".", "__loader__", ".", "get_data", "(", "'requests/cacert.pem'", ")", "should_write_certs", "=", "True", "if", "os", ".", "path", ".", "isfile", "(", "ca_certs_file", ")", ":", "with", "open", "(", "ca_certs_file", ",", "'rb'", ")", "as", "f", ":", "existing_certs", "=", "f", ".", "read", "(", ")", "if", "existing_certs", "!=", "ca_certs_contents", ":", "should_write_certs", "=", "True", "print", "(", "\"Updating local SSL certificates\"", ")", "else", ":", "should_write_certs", "=", "False", "if", "should_write_certs", ":", "with", "open", "(", "ca_certs_file", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "(", "ca_certs_contents", ")", "os", ".", "environ", "[", "'REQUESTS_CA_BUNDLE'", "]", "=", "ca_certs_file" ]
Stop task with the given task tag . If task already stopped then nothing happens .
def stop_task ( self , task_tag , stop_dependent = True , stop_requirements = False ) : # TODO: "coverage" requires more tests task = self . started_tasks ( task_registry_id = task_tag ) if task is None : return def stop ( task_to_stop ) : if task_to_stop in self . __started : if isinstance ( task_to_stop , WStoppableTask ) is True : task_to_stop . stop ( ) self . __started . remove ( task_to_stop ) def stop_dependency ( task_to_stop ) : deeper_dependencies = [ ] for dependent_task in self . __started : if task_to_stop . __registry_tag__ in dependent_task . __class__ . __dependency__ : deeper_dependencies . append ( dependent_task ) for dependent_task in deeper_dependencies : stop_dependency ( dependent_task ) stop ( task_to_stop ) def calculate_requirements ( task_to_stop , cross_requirements = False ) : requirements = set ( ) for dependent_task in self . __started : if dependent_task . __class__ . __registry_tag__ in task_to_stop . __class__ . __dependency__ : requirements . add ( dependent_task ) if cross_requirements is True : return requirements result = set ( ) for task_a in requirements : requirement_match = False for task_b in requirements : if task_a . __class__ . __registry_tag__ in task_b . __class__ . __dependency__ : requirement_match = True break if requirement_match is False : result . add ( task_a ) return result def calculate_priorities ( task_to_stop , * extra_tasks , current_result = None , requirements_left = None ) : if current_result is None : current_result = [ ] tasks_to_stop = [ task_to_stop ] if len ( extra_tasks ) > 0 : tasks_to_stop . extend ( extra_tasks ) current_result . append ( list ( tasks_to_stop ) ) all_requirements = calculate_requirements ( tasks_to_stop [ 0 ] , cross_requirements = True ) nested_requirements = calculate_requirements ( tasks_to_stop [ 0 ] ) for dependent_task in tasks_to_stop [ 1 : ] : nested_requirements = nested_requirements . union ( calculate_requirements ( dependent_task ) ) all_requirements . 
update ( calculate_requirements ( dependent_task , cross_requirements = True ) ) all_requirements = all_requirements . difference ( nested_requirements ) if requirements_left is not None : requirements_left = requirements_left . difference ( all_requirements ) nested_requirements . update ( requirements_left ) if len ( nested_requirements ) == 0 : return current_result return calculate_priorities ( * list ( nested_requirements ) , current_result = current_result , requirements_left = all_requirements ) if stop_dependent is True : stop_dependency ( task ) if stop_requirements is True : for task_list in calculate_priorities ( task ) : for single_task in task_list : stop ( single_task ) if stop_dependent is not True : # check if we've already stopped this task stop ( task )
7,989
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/dependency.py#L218-L312
[ "def", "cursor_constrain", "(", "self", ")", ":", "self", ".", "cur_r", "=", "constrain", "(", "self", ".", "cur_r", ",", "1", ",", "self", ".", "rows", ")", "self", ".", "cur_c", "=", "constrain", "(", "self", ".", "cur_c", ",", "1", ",", "self", ".", "cols", ")" ]
Start task from registry
def start_task ( cls , task_tag , skip_unresolved = False ) : registry = cls . registry_storage ( ) registry . start_task ( task_tag , skip_unresolved = skip_unresolved )
7,990
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/dependency.py#L338-L346
[ "def", "strain_in_plane", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_strain_out_of_plane", "is", "not", "None", ":", "return", "(", "(", "self", ".", "_strain_out_of_plane", "/", "-", "2.", ")", "*", "(", "self", ".", "unstrained", ".", "c11", "(", "*", "*", "kwargs", ")", "/", "self", ".", "unstrained", ".", "c12", "(", "*", "*", "kwargs", ")", ")", ")", "else", ":", "return", "1", "-", "self", ".", "unstrained", ".", "a", "(", "*", "*", "kwargs", ")", "/", "self", ".", "substrate", ".", "a", "(", "*", "*", "kwargs", ")" ]
Stop started task from registry
def stop_task ( cls , task_tag , stop_dependent = True , stop_requirements = False ) : registry = cls . registry_storage ( ) registry . stop_task ( task_tag , stop_dependent = stop_dependent , stop_requirements = stop_requirements )
7,991
https://github.com/a1ezzz/wasp-general/blob/1029839d33eb663f8dec76c1c46754d53c1de4a9/wasp_general/task/dependency.py#L349-L358
[ "def", "pages", "(", "self", ")", ":", "# protected access has to be permitted here to not close the paginator's pages", "# pylint: disable=protected-access", "paginator_pages", "=", "list", "(", "self", ".", "paginator", ".", "_pages", ")", "if", "len", "(", "self", ".", "paginator", ".", "_current_page", ")", ">", "1", ":", "paginator_pages", ".", "append", "(", "'\\n'", ".", "join", "(", "self", ".", "paginator", ".", "_current_page", ")", "+", "'\\n'", "+", "(", "self", ".", "paginator", ".", "suffix", "or", "''", ")", ")", "# pylint: enable=protected-access", "return", "paginator_pages" ]
When this function is invoced in a notebook cell the cell is snipped .
def snip_this ( tag = "" , write_date = True ) : snip ( tag = tag , start = - 1 , write_date = write_date )
7,992
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L51-L53
[ "def", "all", "(", "cls", ")", ":", "# Start with raw queues we know exist from the config", "queues", "=", "{", "x", ":", "0", "for", "x", "in", "Queue", ".", "get_queues_config", "(", ")", "}", "stats", "=", "list", "(", "context", ".", "connections", ".", "mongodb_jobs", ".", "mrq_jobs", ".", "aggregate", "(", "[", "{", "\"$match\"", ":", "{", "\"status\"", ":", "\"queued\"", "}", "}", ",", "{", "\"$group\"", ":", "{", "\"_id\"", ":", "\"$queue\"", ",", "\"jobs\"", ":", "{", "\"$sum\"", ":", "1", "}", "}", "}", "]", ")", ")", "queues", ".", "update", "(", "{", "x", "[", "\"_id\"", "]", ":", "x", "[", "\"jobs\"", "]", "for", "x", "in", "stats", "}", ")", "return", "queues" ]
This function retrieves a tagged or untagged snippet .
def unsnip ( tag = None , start = - 1 ) : import IPython i = IPython . get_ipython ( ) if tag in _tagged_inputs . keys ( ) : if len ( _tagged_inputs [ tag ] ) > 0 : i . set_next_input ( _tagged_inputs [ tag ] [ start ] ) else : if len ( _last_inputs ) > 0 : i . set_next_input ( _last_inputs [ start ] )
7,993
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L55-L64
[ "def", "handle_scrollwheel", "(", "self", ",", "event", ")", ":", "delta_x", ",", "delta_y", ",", "delta_z", "=", "self", ".", "_get_deltas", "(", "event", ")", "if", "delta_x", ":", "self", ".", "events", ".", "append", "(", "self", ".", "emulate_wheel", "(", "delta_x", ",", "'x'", ",", "self", ".", "timeval", ")", ")", "if", "delta_y", ":", "self", ".", "events", ".", "append", "(", "self", ".", "emulate_wheel", "(", "delta_y", ",", "'y'", ",", "self", ".", "timeval", ")", ")", "if", "delta_z", ":", "self", ".", "events", ".", "append", "(", "self", ".", "emulate_wheel", "(", "delta_z", ",", "'z'", ",", "self", ".", "timeval", ")", ")" ]
alerts the user of something happening via notify - send . If it is not installed the alert will be printed to the console .
def alert ( msg , body = "" , icon = None ) : if type ( body ) == str : body = body [ : 200 ] if call ( [ "which" , "notify-send" ] ) == 0 : if icon is not None : call ( [ "notify-send" , msg , "-i" , icon , body ] ) else : call ( [ "notify-send" , msg , body ] ) else : print ( ( "ALERT: " , msg ) )
7,994
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L271-L283
[ "def", "populateFromDirectory", "(", "self", ",", "vcfDirectory", ")", ":", "pattern", "=", "os", ".", "path", ".", "join", "(", "vcfDirectory", ",", "\"*.vcf.gz\"", ")", "dataFiles", "=", "[", "]", "indexFiles", "=", "[", "]", "for", "vcfFile", "in", "glob", ".", "glob", "(", "pattern", ")", ":", "dataFiles", ".", "append", "(", "vcfFile", ")", "indexFiles", ".", "append", "(", "vcfFile", "+", "\".tbi\"", ")", "self", ".", "populateFromFile", "(", "dataFiles", ",", "indexFiles", ")" ]
Iterates through generators recursively and flattens them .
def recgen ( gen , fix_type_errors = True ) : if not hasattr ( gen , '__iter__' ) : yield gen else : try : for i in gen : for ii in recgen ( i ) : yield ii except TypeError : # oops, it seems it was not an iterable even if it had an __iter__ method... # this happens eg. with theano tensor variables as they try to trick you to sum them. if not fix_type_errors : raise # maybe you want this Exception? yield gen
7,995
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L510-L538
[ "def", "validate_instance_dbname", "(", "self", ",", "dbname", ")", ":", "# 1-64 alphanumeric characters, cannot be a reserved MySQL word", "if", "re", ".", "match", "(", "'[\\w-]+$'", ",", "dbname", ")", "is", "not", "None", ":", "if", "len", "(", "dbname", ")", "<=", "41", "and", "len", "(", "dbname", ")", ">=", "1", ":", "if", "dbname", ".", "lower", "(", ")", "not", "in", "MYSQL_RESERVED_WORDS", ":", "return", "True", "return", "'*** Error: Database names must be 1-64 alphanumeric characters,\\\n cannot be a reserved MySQL word.'" ]
Takes a list of dictionaries and creates a dictionary with the combined values for each key in each dicitonary . Missing values are set to None for each dicitonary that does not contain a key that is present in at least one other dicitonary .
def list_of_dicts_to_dict_of_lists ( list_of_dictionaries ) : result = { } all_keys = set ( [ k for d in list_of_dictionaries for k in d . keys ( ) ] ) for d in list_of_dictionaries : for k in all_keys : result . setdefault ( k , [ ] ) . append ( d . get ( k , None ) ) return result
7,996
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L564-L582
[ "def", "window_size", "(", "self", ",", "window_size", ")", ":", "BasePlotter", ".", "window_size", ".", "fset", "(", "self", ",", "window_size", ")", "self", ".", "app_window", ".", "setBaseSize", "(", "*", "window_size", ")" ]
Takes a dictionary of lists and creates a list of dictionaries . If the lists are of unequal length the remaining entries are set to None .
def dict_of_lists_to_list_of_dicts ( dictionary_of_lists ) : return [ { key : dictionary_of_lists [ key ] [ index ] if len ( dictionary_of_lists [ key ] ) > index else None for key in dictionary_of_lists . keys ( ) } for index in range ( max ( map ( len , dictionary_of_lists . values ( ) ) ) ) ]
7,997
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L586-L599
[ "def", "from_offset", "(", "tu", ",", "file", ",", "offset", ")", ":", "return", "conf", ".", "lib", ".", "clang_getLocationForOffset", "(", "tu", ",", "file", ",", "offset", ")" ]
like enumerate but with colors
def colorate ( sequence , colormap = "" , start = 0 , length = None ) : n = start colors = color_space ( colormap , sequence , start = 0.1 , stop = 0.9 , length = length ) for elem in sequence : yield n , colors [ n - start ] , elem n += 1
7,998
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L618-L624
[ "def", "from_directory", "(", "input_dir", ",", "optional_files", "=", "None", ")", ":", "sub_d", "=", "{", "}", "for", "fname", ",", "ftype", "in", "[", "(", "\"INCAR\"", ",", "Incar", ")", ",", "(", "\"KPOINTS\"", ",", "Kpoints", ")", ",", "(", "\"POSCAR\"", ",", "Poscar", ")", ",", "(", "\"POTCAR\"", ",", "Potcar", ")", "]", ":", "fullzpath", "=", "zpath", "(", "os", ".", "path", ".", "join", "(", "input_dir", ",", "fname", ")", ")", "sub_d", "[", "fname", ".", "lower", "(", ")", "]", "=", "ftype", ".", "from_file", "(", "fullzpath", ")", "sub_d", "[", "\"optional_files\"", "]", "=", "{", "}", "if", "optional_files", "is", "not", "None", ":", "for", "fname", ",", "ftype", "in", "optional_files", ".", "items", "(", ")", ":", "sub_d", "[", "\"optional_files\"", "]", "[", "fname", "]", "=", "ftype", ".", "from_file", "(", "os", ".", "path", ".", "join", "(", "input_dir", ",", "fname", ")", ")", "return", "VaspInput", "(", "*", "*", "sub_d", ")" ]
run once to create all children containers for each combination of the keywords
def generate ( self , * * kwargs ) : import collections all_params = cartesian_dicts ( { k : kwargs [ k ] for k in kwargs . keys ( ) if isinstance ( kwargs [ k ] , collections . Iterable ) } ) for pi , p in enumerate ( all_params ) : if self . name_mode == 'int' : n = str ( len ( self . containers ) ) else : n = None self . containers . append ( PDContainer ( name = n , params = p , parent = self ) ) self . parameters . update ( { k : kwargs [ k ] for k in kwargs . keys ( ) if not isinstance ( kwargs [ k ] , collections . Iterable ) } ) self . save ( )
7,999
https://github.com/jahuth/litus/blob/712b016ea2dbb1cf0a30bfdbb0a136945a7b7c5e/__init__.py#L764-L775
[ "def", "RMSError", "(", "self", ")", ":", "tss", "=", "self", ".", "TSSError", "(", ")", "return", "math", ".", "sqrt", "(", "tss", "/", "self", ".", "size", ")" ]