query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Compute the stress transfer function .
def calc_stress_tf(self, lin, lout, damped):
    """Compute the stress transfer function.

    Parameters
    ----------
    lin, lout : locations passed through to ``calc_strain_tf``; ``lout``
        must expose a ``layer`` with ``comp_shear_mod`` / ``shear_mod``.
    damped : bool
        If True, scale by the complex shear modulus so the damping is
        included; otherwise scale by the (real) shear modulus.
    """
    result = self.calc_strain_tf(lin, lout)
    # Stress = modulus * strain; the complex modulus carries damping.
    modulus = lout.layer.comp_shear_mod if damped else lout.layer.shear_mod
    result *= modulus
    return result
3,900
https://github.com/arkottke/pysra/blob/c72fd389d6c15203c0c00728ac00f101bae6369d/pysra/propagation.py#L380-L400
[ "def", "delete_logs", "(", "room", ")", ":", "from", "indico_chat", ".", "plugin", "import", "ChatPlugin", "base_url", "=", "ChatPlugin", ".", "settings", ".", "get", "(", "'log_url'", ")", "if", "not", "base_url", "or", "room", ".", "custom_server", ":", "return", "try", ":", "response", "=", "requests", ".", "get", "(", "posixpath", ".", "join", "(", "base_url", ",", "'delete'", ")", ",", "params", "=", "{", "'cr'", ":", "room", ".", "jid", "}", ")", ".", "json", "(", ")", "except", "(", "RequestException", ",", "ValueError", ")", ":", "current_plugin", ".", "logger", ".", "exception", "(", "'Could not delete logs for %s'", ",", "room", ".", "jid", ")", "return", "if", "not", "response", ".", "get", "(", "'success'", ")", ":", "current_plugin", ".", "logger", ".", "warning", "(", "'Could not delete logs for %s: %s'", ",", "room", ".", "jid", ",", "response", ".", "get", "(", "'error'", ")", ")" ]
Compute the strain transfer function from lout to location_in .
def calc_strain_tf(self, lin, lout):
    """Compute the strain transfer function from the input location to
    a location within a layer.

    Parameters
    ----------
    lin : input location (wave amplitude taken via ``wave_at_location``).
    lout : output location; must be a *within* wave field and carry
        ``index`` and ``depth_within``.

    Returns
    -------
    complex ndarray, one value per angular frequency; zero at (near-)zero
    frequencies, scaled by gravity.
    """
    # FIXME: Correct discussion for using acceleration FAS
    # Strain(angFreq, z=h_m/2)
    # ------------------------ =
    #  accel_n(angFreq)
    #
    #    i k*_m [ A_m exp(i k*_m h_m / 2) - B_m exp(-i k*_m h_m / 2)]
    #    ------------------------------------------------------------
    #                 -angFreq^2 (2 * A_n)
    #
    assert lout.wave_field == WaveField.within
    ang_freqs = self.motion.angular_freqs
    # The numerator cannot be computed using wave_at_location() because
    # it is A - B.
    cterm = 1j * self._wave_nums[lout.index, :] * lout.depth_within
    numer = (1j * self._wave_nums[lout.index, :] *
             (self._waves_a[lout.index, :] * np.exp(cterm) -
              self._waves_b[lout.index, :] * np.exp(-cterm)))
    denom = -ang_freqs ** 2 * self.wave_at_location(lin)
    # Only compute transfer function for non-zero frequencies
    mask = ~np.isclose(ang_freqs, 0)
    # FIX: `np.complex` was deprecated in NumPy 1.20 and later removed;
    # the builtin `complex` (== np.complex128 here) is the replacement.
    tf = np.zeros_like(mask, dtype=complex)
    # Scale into units from gravity
    tf[mask] = GRAVITY * numer[mask] / denom[mask]
    return tf
3,901
https://github.com/arkottke/pysra/blob/c72fd389d6c15203c0c00728ac00f101bae6369d/pysra/propagation.py#L402-L448
[ "def", "ensure_iam", "(", "self", ",", "publisher", "=", "None", ")", ":", "topic", "=", "self", ".", "get_topic_param", "(", ")", "client", "=", "self", ".", "session", ".", "client", "(", "'pubsub'", ",", "'v1'", ",", "'projects.topics'", ")", "policy", "=", "client", ".", "execute_command", "(", "'getIamPolicy'", ",", "{", "'resource'", ":", "topic", "}", ")", "policy", ".", "pop", "(", "'etag'", ")", "found", "=", "False", "for", "binding", "in", "policy", ".", "get", "(", "'bindings'", ",", "{", "}", ")", ":", "if", "binding", "[", "'role'", "]", "!=", "'roles/pubsub.publisher'", ":", "continue", "if", "publisher", "in", "binding", "[", "'members'", "]", ":", "return", "found", "=", "binding", "if", "not", "found", ":", "policy", ".", "setdefault", "(", "'bindings'", ",", "{", "'members'", ":", "[", "publisher", "]", ",", "'role'", ":", "'roles/pubsub.publisher'", "}", ")", "else", ":", "found", "[", "'members'", "]", ".", "append", "(", "publisher", ")", "client", ".", "execute_command", "(", "'setIamPolicy'", ",", "{", "'resource'", ":", "topic", ",", "'body'", ":", "{", "'policy'", ":", "policy", "}", "}", ")" ]
Compute an estimate of the strains .
def _estimate_strains(self):
    """Compute an estimate of the strains.

    Each layer's strain is approximated as the motion's PGV divided by
    the layer's initial shear-wave velocity; layers are reset first.
    """
    for layer in self._profile:
        layer.reset()
        layer.strain = self._motion.pgv / layer.initial_shear_vel
3,902
https://github.com/arkottke/pysra/blob/c72fd389d6c15203c0c00728ac00f101bae6369d/pysra/propagation.py#L514-L519
[ "def", "get_last_components_by_type", "(", "component_types", ",", "topic_id", ",", "db_conn", "=", "None", ")", ":", "db_conn", "=", "db_conn", "or", "flask", ".", "g", ".", "db_conn", "schedule_components_ids", "=", "[", "]", "for", "ct", "in", "component_types", ":", "where_clause", "=", "sql", ".", "and_", "(", "models", ".", "COMPONENTS", ".", "c", ".", "type", "==", "ct", ",", "models", ".", "COMPONENTS", ".", "c", ".", "topic_id", "==", "topic_id", ",", "models", ".", "COMPONENTS", ".", "c", ".", "export_control", "==", "True", ",", "models", ".", "COMPONENTS", ".", "c", ".", "state", "==", "'active'", ")", "# noqa", "query", "=", "(", "sql", ".", "select", "(", "[", "models", ".", "COMPONENTS", ".", "c", ".", "id", "]", ")", ".", "where", "(", "where_clause", ")", ".", "order_by", "(", "sql", ".", "desc", "(", "models", ".", "COMPONENTS", ".", "c", ".", "created_at", ")", ")", ")", "cmpt_id", "=", "db_conn", ".", "execute", "(", "query", ")", ".", "fetchone", "(", ")", "if", "cmpt_id", "is", "None", ":", "msg", "=", "'Component of type \"%s\" not found or not exported.'", "%", "ct", "raise", "dci_exc", ".", "DCIException", "(", "msg", ",", "status_code", "=", "412", ")", "cmpt_id", "=", "cmpt_id", "[", "0", "]", "if", "cmpt_id", "in", "schedule_components_ids", ":", "msg", "=", "(", "'Component types %s malformed: type %s duplicated.'", "%", "(", "component_types", ",", "ct", ")", ")", "raise", "dci_exc", ".", "DCIException", "(", "msg", ",", "status_code", "=", "412", ")", "schedule_components_ids", ".", "append", "(", "cmpt_id", ")", "return", "schedule_components_ids" ]
Compute the strain used for iterations of material properties .
def _calc_strain(self, loc_input, loc_layer, motion, *args):
    """Compute the strain used for iterations of material properties.

    Effective strain = ``strain_ratio`` * maximum strain.
    """
    peak = self._calc_strain_max(loc_input, loc_layer, motion, *args)
    return self.strain_ratio * peak
3,903
https://github.com/arkottke/pysra/blob/c72fd389d6c15203c0c00728ac00f101bae6369d/pysra/propagation.py#L557-L560
[ "def", "list_supported_drivers", "(", ")", ":", "def", "convert_oslo_config", "(", "oslo_options", ")", ":", "options", "=", "[", "]", "for", "opt", "in", "oslo_options", ":", "tmp_dict", "=", "{", "k", ":", "str", "(", "v", ")", "for", "k", ",", "v", "in", "vars", "(", "opt", ")", ".", "items", "(", ")", "if", "not", "k", ".", "startswith", "(", "'_'", ")", "}", "options", ".", "append", "(", "tmp_dict", ")", "return", "options", "def", "list_drivers", "(", "queue", ")", ":", "cwd", "=", "os", ".", "getcwd", "(", ")", "# Go to the parent directory directory where Cinder is installed", "os", ".", "chdir", "(", "utils", ".", "__file__", ".", "rsplit", "(", "os", ".", "sep", ",", "2", ")", "[", "0", "]", ")", "try", ":", "drivers", "=", "cinder_interface_util", ".", "get_volume_drivers", "(", ")", "mapping", "=", "{", "d", ".", "class_name", ":", "vars", "(", "d", ")", "for", "d", "in", "drivers", "}", "# Drivers contain class instances which are not serializable", "for", "driver", "in", "mapping", ".", "values", "(", ")", ":", "driver", ".", "pop", "(", "'cls'", ",", "None", ")", "if", "'driver_options'", "in", "driver", ":", "driver", "[", "'driver_options'", "]", "=", "convert_oslo_config", "(", "driver", "[", "'driver_options'", "]", ")", "finally", ":", "os", ".", "chdir", "(", "cwd", ")", "queue", ".", "put", "(", "mapping", ")", "# Use a different process to avoid having all driver classes loaded in", "# memory during our execution.", "queue", "=", "multiprocessing", ".", "Queue", "(", ")", "p", "=", "multiprocessing", ".", "Process", "(", "target", "=", "list_drivers", ",", "args", "=", "(", "queue", ",", ")", ")", "p", ".", "start", "(", ")", "result", "=", "queue", ".", "get", "(", ")", "p", ".", "join", "(", ")", "return", "result" ]
Compute the effective strain at the center of a layer .
def _calc_strain_max(self, loc_input, loc_layer, motion, *args):
    """Compute the effective (peak) strain at the center of a layer."""
    # Peak response of the strain transfer function for this motion.
    strain_tf = self.calc_strain_tf(loc_input, loc_layer)
    return motion.calc_peak(strain_tf)
3,904
https://github.com/arkottke/pysra/blob/c72fd389d6c15203c0c00728ac00f101bae6369d/pysra/propagation.py#L562-L565
[ "def", "open", "(", "self", ")", ":", "self", ".", "_connection", "=", "sqlite3", ".", "connect", "(", "self", ".", "_dbname", ")", "self", ".", "_cursor", "=", "self", ".", "_connection", ".", "cursor", "(", ")", "self", ".", "_session_info", "=", "SessionInfoTable", "(", "self", ".", "_connection", ",", "self", ".", "_cursor", ")", "self", ".", "_reports", "=", "ReportsTable", "(", "self", ".", "_connection", ",", "self", ".", "_cursor", ")" ]
Estimate the strains by running an EQL site response .
def _estimate_strains(self):
    """Estimate the strains by running an EQL site response analysis."""
    # A throw-away equivalent-linear calculator seeds the layer strains.
    calculator = EquivalentLinearCalculator()
    calculator(self._motion, self._profile, self._loc_input)
3,905
https://github.com/arkottke/pysra/blob/c72fd389d6c15203c0c00728ac00f101bae6369d/pysra/propagation.py#L606-L612
[ "def", "write_options_to_file", "(", "self", ",", "filename", ",", "file_format", "=", "'yaml'", ")", ":", "if", "file_format", ".", "lower", "(", ")", "==", "'yaml'", ":", "self", ".", "write_options_to_YAML", "(", "filename", ")", "elif", "file_format", ".", "lower", "(", ")", "==", "'json'", ":", "self", ".", "write_options_to_JSON", "(", "filename", ")", "else", ":", "raise", "ValueError", "(", "'Unknown format {}'", ".", "format", "(", "file_format", ")", ")" ]
Decorator to measure the time used by the recipe
def timeit(method):
    """Decorator to measure the time used by the recipe.

    Wraps a ``method(self, rinput)`` recipe entry point, records UTC
    start/end times on the returned result via ``result.time_it`` and
    logs a short notice.
    """
    import datetime

    @functools.wraps(method)
    def timed_method(self, rinput):
        started = datetime.datetime.utcnow()
        result = method(self, rinput)
        finished = datetime.datetime.utcnow()
        # The recipe result itself stores the measured interval.
        result.time_it(started, finished)
        self.logger.info('total time measured')
        return result

    return timed_method
3,906
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/core/recipes.py#L261-L276
[ "def", "split_face", "(", "self", ",", "face", ",", "number", "=", "None", ",", "ids", "=", "None", ")", ":", "assert", "face", "in", "self", ".", "faces", "if", "ids", ":", "ids", "=", "set", "(", "ids", ")", "else", ":", "max_int", "=", "max", "(", "x", "for", "x", "in", "self", ".", "faces", "if", "isinstance", "(", "x", ",", "int", ")", ")", "ids", "=", "set", "(", "range", "(", "max_int", "+", "1", ",", "max_int", "+", "1", "+", "(", "number", "or", "2", ")", ")", ")", "for", "obj", "in", "self", ".", "topology", ".", "values", "(", ")", ":", "if", "face", "in", "obj", ":", "obj", ".", "discard", "(", "face", ")", "obj", ".", "update", "(", "ids", ")", "self", ".", "faces", ".", "discard", "(", "face", ")", "self", ".", "faces", ".", "update", "(", "ids", ")", "return", "ids" ]
Save intermediate FITS objects .
def save_intermediate_img(self, img, name):
    """Save an intermediate FITS object under `name`.

    Writing only happens when ``self.intermediate_results`` is enabled;
    an existing file is overwritten.
    """
    if not self.intermediate_results:
        return
    img.writeto(name, overwrite=True)
3,907
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/core/recipes.py#L161-L164
[ "def", "touch_member", "(", "config", ",", "dcs", ")", ":", "p", "=", "Postgresql", "(", "config", "[", "'postgresql'", "]", ")", "p", ".", "set_state", "(", "'running'", ")", "p", ".", "set_role", "(", "'master'", ")", "def", "restapi_connection_string", "(", "config", ")", ":", "protocol", "=", "'https'", "if", "config", ".", "get", "(", "'certfile'", ")", "else", "'http'", "connect_address", "=", "config", ".", "get", "(", "'connect_address'", ")", "listen", "=", "config", "[", "'listen'", "]", "return", "'{0}://{1}/patroni'", ".", "format", "(", "protocol", ",", "connect_address", "or", "listen", ")", "data", "=", "{", "'conn_url'", ":", "p", ".", "connection_string", ",", "'api_url'", ":", "restapi_connection_string", "(", "config", "[", "'restapi'", "]", ")", ",", "'state'", ":", "p", ".", "state", ",", "'role'", ":", "p", ".", "role", "}", "return", "dcs", ".", "touch_member", "(", "data", ",", "permanent", "=", "True", ")" ]
Save intermediate array object as FITS .
def save_intermediate_array(self, array, name):
    """Save an intermediate array object as FITS under `name`.

    Writing only happens when ``self.intermediate_results`` is enabled;
    an existing file is overwritten.
    """
    if not self.intermediate_results:
        return
    fits.writeto(name, array, overwrite=True)
3,908
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/core/recipes.py#L166-L169
[ "def", "start_vm", "(", "access_token", ",", "subscription_id", ",", "resource_group", ",", "vm_name", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourceGroups/'", ",", "resource_group", ",", "'/providers/Microsoft.Compute/virtualMachines/'", ",", "vm_name", ",", "'/start'", ",", "'?api-version='", ",", "COMP_API", "]", ")", "return", "do_post", "(", "endpoint", ",", "''", ",", "access_token", ")" ]
Build a RecipeInput object .
def build_recipe_input ( self , ob , dal ) : result = { } # We have to decide if the ob input # is a plain description (ObservingBlock) # or if it contains the nested results (Obsres) # # it has to contain the tags corresponding to the observing modes... ob_query_skip = False ob_query_field = 'obresult' if isinstance ( ob , ObservingBlock ) : import numina . types . obsresult as obtype # We have to build an Obsres for key , req in self . requirements ( ) . items ( ) : if isinstance ( req . type , obtype . ObservationResultType ) : ob_query_field = key ob_query_skip = True query_option = self . query_options . get ( key ) # print('req for ob is named', key, query_option) new_or = ObservationResult ( ) new_or . __dict__ = ob . __dict__ obsres = req . query ( dal , new_or , options = query_option ) tagger = self . mode . tagger if tagger is not None : self . logger . debug ( 'Use mode tagger to fill tags in OB' ) obsres . tags = tagger ( obsres ) else : obsres . tags = None break else : # nothing to do obsres = ob else : obsres = ob # Get tags_names per REQ self . logger . debug ( 'getting query fields per REQ' ) qfields = set ( ) for key , req in self . requirements ( ) . items ( ) : tag_n = req . tag_names ( ) self . logger . debug ( "%s has these query fields %s" , key , tag_n ) qfields . update ( tag_n ) if obsres . tags is None : self . logger . debug ( 'running recipe tagger' ) self . logger . debug ( 'with query fields %s' , qfields ) if qfields : obsres . tags = self . obsres_extractor ( obsres , qfields ) else : obsres . tags = { } for key , req in self . requirements ( ) . items ( ) : try : query_option = self . query_options . get ( key ) if key == ob_query_field and ob_query_skip : result [ key ] = obsres else : result [ key ] = req . query ( dal , obsres , options = query_option ) except NoResultFound as notfound : req . on_query_not_found ( notfound ) return self . create_input ( * * result )
3,909
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/core/recipes.py#L190-L255
[ "def", "get_page_horz_percentile", "(", "mention", ",", "page_width", "=", "DEFAULT_WIDTH", ",", "page_height", "=", "DEFAULT_HEIGHT", ")", ":", "span", "=", "_to_span", "(", "mention", ")", "return", "bbox_from_span", "(", "span", ")", ".", "left", "/", "page_width" ]
Returns a dictionary with subsets of FileInfo instances from a TXT file .
def subsets_of_fileinfo_from_txt(filename):
    """Returns a dictionary with subsets of FileInfo instances from a TXT file.

    The file format is a sequence of sublists.  Each sublist starts with a
    header line ``@ <nfiles> <label>`` followed by exactly ``nfiles`` lines,
    each giving a file name optionally followed by extra info fields.
    Lines starting with ``#`` are comments; blank lines are ignored.

    Parameters
    ----------
    filename : str
        Path to the TXT file describing the subsets.

    Returns
    -------
    dict
        Maps a running integer index to ``{'label': <label>,
        'list_of_fileinfo': [FileInfo, ...]}``.

    Raises
    ------
    ValueError
        If the input file or a listed file does not exist, a header is
        missing or misplaced, or a sublist is truncated.
    """
    # check for input file
    if not os.path.isfile(filename):
        raise ValueError("File " + filename + " not found!")
    # read input file
    with open(filename) as f:
        file_content = f.read().splitlines()
    # obtain the different subsets of files
    dict_of_subsets_of_fileinfo = {}
    label = None  # label of the sublist currently being collected
    sublist_of_fileinfo = []
    idict = 0  # index of the next completed sublist
    ifiles = 0  # files collected so far in the current sublist
    nfiles = 0  # files expected in the current sublist (from the @ header)
    sublist_finished = True
    for line in file_content:
        if len(line) > 0:
            if line[0] != '#':
                if label is None:
                    # Expecting a "@ nfiles label" header line here.
                    if line[0] == "@":
                        nfiles = int(line[1:].split()[0])
                        label = line[1:].split()[1]
                        sublist_of_fileinfo = []
                        ifiles = 0
                        sublist_finished = False
                    else:
                        raise ValueError("Expected @ symbol not found!")
                else:
                    # Inside a sublist: a header is not allowed until the
                    # announced number of files has been read.
                    if line[0] == "@":
                        raise ValueError("Unexpected @ symbol found!")
                    tmplist = line.split()
                    tmpfile = tmplist[0]
                    if len(tmplist) > 1:
                        tmpinfo = tmplist[1:]
                    else:
                        tmpinfo = None
                    if not os.path.isfile(tmpfile):
                        raise ValueError("File " + tmpfile + " not found!")
                    sublist_of_fileinfo.append(FileInfo(tmpfile, tmpinfo))
                    ifiles += 1
                    if ifiles == nfiles:
                        # Sublist complete: store it and reset the state.
                        dict_of_subsets_of_fileinfo[idict] = {}
                        tmpdict = dict_of_subsets_of_fileinfo[idict]
                        tmpdict['label'] = label
                        tmpdict['list_of_fileinfo'] = sublist_of_fileinfo
                        idict += 1
                        label = None
                        sublist_of_fileinfo = []
                        ifiles = 0
                        sublist_finished = True
    if not sublist_finished:
        raise ValueError("Unexpected end of sublist of files.")
    return dict_of_subsets_of_fileinfo
3,910
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/tools/subsets_of_fileinfo_from_txt.py#L10-L95
[ "def", "_wait_for_request", "(", "self", ",", "uuid", ",", "connection_adapter", "=", "None", ")", ":", "start_time", "=", "time", ".", "time", "(", ")", "while", "not", "self", ".", "_response", "[", "uuid", "]", ":", "connection_adapter", ".", "check_for_errors", "(", ")", "if", "time", ".", "time", "(", ")", "-", "start_time", ">", "self", ".", "_timeout", ":", "self", ".", "_raise_rpc_timeout_error", "(", "uuid", ")", "time", ".", "sleep", "(", "IDLE_WAIT", ")" ]
Compute the slice representation of intersection of two arrays .
def subarray_match(shape, ref, sshape, sref=None):
    """Compute the slice representation of the intersection of two arrays.

    An array of shape `sshape` is placed inside an array of shape `shape`
    so that point `sref` of the small array (default: its origin) lands on
    point `ref` of the large one.

    Returns a pair of slice tuples ``(f, s)`` addressing the overlapping
    region in the large and small arrays respectively, or ``(None, None)``
    when the arrays do not overlap.
    """
    # Reference point in the large array / in the subarray.
    anchor = asarray(ref, dtype='int')
    if sref is not None:
        sub_anchor = asarray(sref, dtype='int')
    else:
        sub_anchor = zeros_like(anchor)
    offset = anchor - sub_anchor
    # Lower-left and upper-right corners, clipped to both arrays.
    urc1 = minimum(offset + asarray(sshape) - 1, asarray(shape) - 1)
    blc1 = maximum(offset, 0)
    urc2 = urc1 - offset
    blc2 = blc1 - offset

    def bounded_slice(lo, hi):
        # An empty interval means no overlap along this axis.
        return None if lo >= hi + 1 else slice(lo, hi + 1)

    main = tuple(bounded_slice(lo, hi) for lo, hi in zip(blc1, urc1))
    sub = tuple(bounded_slice(lo, hi) for lo, hi in zip(blc2, urc2))
    if all(main) and all(sub):
        return (main, sub)
    return (None, None)
3,911
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/array/__init__.py#L21-L74
[ "def", "_read_weights", "(", "self", ")", ":", "weights", "=", "[", "]", "grams_per_pound", "=", "453.592", "# Read from each of the sensors", "for", "ser", "in", "self", ".", "_serials", ":", "ser", ".", "write", "(", "'W\\r'", ")", "ser", ".", "flush", "(", ")", "time", ".", "sleep", "(", "0.02", ")", "for", "ser", "in", "self", ".", "_serials", ":", "try", ":", "output_str", "=", "ser", ".", "readline", "(", ")", "weight", "=", "float", "(", "output_str", ")", "*", "grams_per_pound", "weights", ".", "append", "(", "weight", ")", "except", ":", "weights", ".", "append", "(", "0.0", ")", "# Log the output", "log_output", "=", "''", "for", "w", "in", "weights", ":", "log_output", "+=", "'{:.2f} '", ".", "format", "(", "w", ")", "rospy", ".", "loginfo", "(", "log_output", ")", "return", "weights" ]
Scale an array to a new shape .
def rebin_scale(a, scale=1):
    """Scale an array to a new shape, enlarging each axis by `scale`."""
    scaled_shape = tuple(side * scale for side in a.shape)
    return rebin(a, scaled_shape)
3,912
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/array/__init__.py#L171-L176
[ "def", "update_version_descriptor", "(", "self", ",", "task", ",", "releasetype", ",", "descriptor", ",", "verbrowser", ",", "commentbrowser", ")", ":", "if", "task", "is", "None", ":", "null", "=", "treemodel", ".", "TreeItem", "(", "None", ")", "verbrowser", ".", "set_model", "(", "treemodel", ".", "TreeModel", "(", "null", ")", ")", "return", "m", "=", "self", ".", "create_version_model", "(", "task", ",", "releasetype", ",", "descriptor", ")", "verbrowser", ".", "set_model", "(", "m", ")", "commentbrowser", ".", "set_model", "(", "m", ")" ]
Rebin an array to a new shape .
def rebin(a, newshape):
    """Rebin an array to a new shape using floor (nearest-lower) indexing."""
    step_slices = [slice(0, old, float(old) / new)
                   for old, new in zip(a.shape, newshape)]
    grid = numpy.mgrid[step_slices]
    # choose the biggest smaller integer index
    idx = grid.astype('i')
    return a[tuple(idx)]
3,913
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/array/__init__.py#L179-L187
[ "def", "_compute_ogg_page_crc", "(", "page", ")", ":", "page_zero_crc", "=", "page", "[", ":", "OGG_FIRST_PAGE_HEADER_CRC_OFFSET", "]", "+", "b\"\\00\"", "*", "OGG_FIRST_PAGE_HEADER_CRC", ".", "size", "+", "page", "[", "OGG_FIRST_PAGE_HEADER_CRC_OFFSET", "+", "OGG_FIRST_PAGE_HEADER_CRC", ".", "size", ":", "]", "return", "ogg_page_crc", "(", "page_zero_crc", ")" ]
Interpolate 2D array data in rows
def fixpix(data, mask, kind='linear'):
    """Interpolate 2D array data in rows.

    Masked pixels of each row are replaced in place by interpolating the
    unmasked pixels of that row.

    Parameters
    ----------
    data : 2D ndarray, modified in place.
    mask : 2D array, same shape as `data`; nonzero marks bad pixels.
    kind : interpolation kind forwarded to ``scipy.interpolate.interp1d``.

    Returns
    -------
    `data`, with the masked pixels replaced.

    Raises
    ------
    ValueError if `data` and `mask` shapes differ.
    """
    if data.shape != mask.shape:
        raise ValueError
    if not numpy.any(mask):
        return data
    # FIX: the abscissa must span the row length (number of columns,
    # shape[1]); the original used shape[0] (number of rows), which only
    # worked for square arrays and raised for rectangular ones.
    x = numpy.arange(0, data.shape[1])
    for row, mrow in zip(data, mask):
        if numpy.any(mrow):
            # Interpolate if there's some pixel missing
            valid = (mrow == numpy.False_)
            invalid = (mrow == numpy.True_)
            itp = interp1d(x[valid], row[valid], kind=kind, copy=False)
            row[invalid] = itp(x[invalid]).astype(row.dtype)
    return data
3,914
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/array/__init__.py#L190-L205
[ "def", "acme_renew_certificates", "(", ")", ":", "for", "csr", "in", "glob", "(", "os", ".", "path", ".", "join", "(", "CERTIFICATES_PATH", ",", "'*.csr'", ")", ")", ":", "common_name", "=", "os", ".", "path", ".", "basename", "(", "csr", ")", "common_name", "=", "os", ".", "path", ".", "splitext", "(", "common_name", ")", "[", "0", "]", "certificate_path", "=", "\"{}.crt\"", ".", "format", "(", "common_name", ")", "certificate_path", "=", "os", ".", "path", ".", "join", "(", "CERTIFICATES_PATH", ",", "certificate_path", ")", "with", "open", "(", "certificate_path", ")", "as", "file", ":", "crt", "=", "OpenSSL", ".", "crypto", ".", "load_certificate", "(", "OpenSSL", ".", "crypto", ".", "FILETYPE_PEM", ",", "file", ".", "read", "(", ")", ")", "expiration", "=", "crt", ".", "get_notAfter", "(", ")", "expiration", "=", "_parse_asn1_generalized_date", "(", "expiration", ")", "remaining", "=", "expiration", "-", "datetime", ".", "utcnow", "(", ")", "if", "remaining", ">", "timedelta", "(", "days", "=", "30", ")", ":", "print", "\"No need to renew {} ({})\"", ".", "format", "(", "certificate_path", ",", "remaining", ")", "continue", "print", "\"Renewing {} ({})\"", ".", "format", "(", "certificate_path", ",", "remaining", ")", "certificate_request_path", "=", "\"{}.csr\"", ".", "format", "(", "common_name", ")", "certificate_request_path", "=", "os", ".", "path", ".", "join", "(", "CERTIFICATES_PATH", ",", "certificate_request_path", ")", "signed_cert", "=", "\"{}-signed.crt\"", ".", "format", "(", "common_name", ")", "signed_cert", "=", "os", ".", "path", ".", "join", "(", "CERTIFICATES_PATH", ",", "signed_cert", ")", "_internal_sign_certificate", "(", "certificate_path", ",", "certificate_request_path", ",", "signed_cert", ")" ]
Substitute pixels in mask by a bilinear least square fitting .
def fixpix2 ( data , mask , iterations = 3 , out = None ) : out = out if out is not None else data . copy ( ) # A binary mask, regions are ones binry = mask != 0 # Label regions in the binary mask lblarr , labl = ndimage . label ( binry ) # Structure for dilation is 8-way stct = ndimage . generate_binary_structure ( 2 , 2 ) # Pixels in the background back = lblarr == 0 # For each object for idx in range ( 1 , labl + 1 ) : # Pixels of the object segm = lblarr == idx # Pixels of the object or the background # dilation will only touch these pixels dilmask = numpy . logical_or ( back , segm ) # Dilation 3 times more = ndimage . binary_dilation ( segm , stct , iterations = iterations , mask = dilmask ) # Border pixels # Pixels in the border around the object are # more and (not segm) border = numpy . logical_and ( more , numpy . logical_not ( segm ) ) # Pixels in the border xi , yi = border . nonzero ( ) # Bilinear leastsq calculator calc = FitOne ( xi , yi , out [ xi , yi ] ) # Pixels in the region xi , yi = segm . nonzero ( ) # Value is obtained from the fit out [ segm ] = calc ( xi , yi ) return out
3,915
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/array/__init__.py#L208-L247
[ "def", "split_volume_from_journal", "(", "citation_elements", ")", ":", "for", "el", "in", "citation_elements", ":", "if", "el", "[", "'type'", "]", "==", "'JOURNAL'", "and", "';'", "in", "el", "[", "'title'", "]", ":", "el", "[", "'title'", "]", ",", "series", "=", "el", "[", "'title'", "]", ".", "rsplit", "(", "';'", ",", "1", ")", "el", "[", "'volume'", "]", "=", "series", "+", "el", "[", "'volume'", "]", "return", "citation_elements" ]
Return x if it is an array or create an array and fill it with x .
def numberarray(x, shape):
    """Return x if it is iterable; otherwise an array of `shape` filled with x."""
    try:
        iter(x)
    except TypeError:
        # Scalar: broadcast it into a full array of the requested shape.
        return numpy.ones(shape) * x
    # Already array-like: hand it back untouched.
    return x
3,916
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/array/__init__.py#L291-L298
[ "def", "_unbind_topics", "(", "self", ",", "topics", ")", ":", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "status", ")", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "tracing", ")", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "streaming", ")", "self", ".", "client", ".", "unsubscribe", "(", "topics", ".", "response", ")" ]
Returns the token model instance associated with the given request token key . If no user is retrieved AnonymousToken is returned .
def get_token(request):
    """Returns the token model instance associated with the given request
    token key. If no user is retrieved AnonymousToken is returned.

    When no auth header is present and an overtaker cookie name is
    configured, an active token is looked up from that cookie first and
    returned if it maps to an authenticated user; otherwise the lookup
    falls through to the standard ``utils.get_token``.
    """
    if (not request.META.get(header_name_to_django(auth_token_settings.HEADER_NAME))
            and config.CHAMBER_MULTIDOMAINS_OVERTAKER_AUTH_COOKIE_NAME):
        # NOTE(review): variable name keeps the original's "ovetaker" typo.
        ovetaker_auth_token = request.COOKIES.get(config.CHAMBER_MULTIDOMAINS_OVERTAKER_AUTH_COOKIE_NAME)
        token = get_object_or_none(Token, key=ovetaker_auth_token, is_active=True)
        if utils.get_user_from_token(token).is_authenticated():
            return token
    return utils.get_token(request)
3,917
https://github.com/druids/django-chamber/blob/eef4169923557e96877a664fa254e8c0814f3f23/chamber/multidomains/auth/middleware.py#L13-L25
[ "def", "isrchi", "(", "value", ",", "ndim", ",", "array", ")", ":", "value", "=", "ctypes", ".", "c_int", "(", "value", ")", "ndim", "=", "ctypes", ".", "c_int", "(", "ndim", ")", "array", "=", "stypes", ".", "toIntVector", "(", "array", ")", "return", "libspice", ".", "isrchi_c", "(", "value", ",", "ndim", ",", "array", ")" ]
Lazy set user and token
def process_request(self, request):
    """Lazy set user and token.

    The token is resolved eagerly; the user lookup is deferred until the
    first attribute access via SimpleLazyObject.
    """
    request.token = get_token(request)
    request.user = SimpleLazyObject(lambda: get_user(request))
    # Allow per-request CSRF exemption decided by dont_enforce_csrf_checks.
    request._dont_enforce_csrf_checks = dont_enforce_csrf_checks(request)
3,918
https://github.com/druids/django-chamber/blob/eef4169923557e96877a664fa254e8c0814f3f23/chamber/multidomains/auth/middleware.py#L30-L36
[ "def", "delete_attachments", "(", "self", ",", "volumeID", ",", "attachmentsID", ")", ":", "log", ".", "debug", "(", "\"deleting attachments from volume '{}': {}\"", ".", "format", "(", "volumeID", ",", "attachmentsID", ")", ")", "rawVolume", "=", "self", ".", "_req_raw_volume", "(", "volumeID", ")", "insID", "=", "[", "a", "[", "'id'", "]", "for", "a", "in", "rawVolume", "[", "'_source'", "]", "[", "'_attachments'", "]", "]", "# check that all requested file are present", "for", "id", "in", "attachmentsID", ":", "if", "id", "not", "in", "insID", ":", "raise", "NotFoundException", "(", "\"could not found attachment '{}' of the volume '{}'\"", ".", "format", "(", "id", ",", "volumeID", ")", ")", "for", "index", ",", "id", "in", "enumerate", "(", "attachmentsID", ")", ":", "rawVolume", "[", "'_source'", "]", "[", "'_attachments'", "]", ".", "pop", "(", "insID", ".", "index", "(", "id", ")", ")", "self", ".", "_db", ".", "modify_book", "(", "volumeID", ",", "rawVolume", "[", "'_source'", "]", ",", "version", "=", "rawVolume", "[", "'_version'", "]", ")" ]
Auxiliary function to call ximplotxy from a jupyter notebook .
def ximplotxy_jupyter(x, y, fmt=None, **args):
    """Auxiliary function to call ximplotxy from a jupyter notebook.

    Forwards all arguments with the jupyter flag forced on; `fmt` is only
    passed along when given.
    """
    if fmt is None:
        return ximplotxy(x, y, using_jupyter=True, **args)
    return ximplotxy(x, y, fmt, using_jupyter=True, **args)
3,919
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/array/display/ximplotxy.py#L20-L27
[ "def", "initialize_communities_bucket", "(", ")", ":", "bucket_id", "=", "UUID", "(", "current_app", ".", "config", "[", "'COMMUNITIES_BUCKET_UUID'", "]", ")", "if", "Bucket", ".", "query", ".", "get", "(", "bucket_id", ")", ":", "raise", "FilesException", "(", "\"Bucket with UUID {} already exists.\"", ".", "format", "(", "bucket_id", ")", ")", "else", ":", "storage_class", "=", "current_app", ".", "config", "[", "'FILES_REST_DEFAULT_STORAGE_CLASS'", "]", "location", "=", "Location", ".", "get_default", "(", ")", "bucket", "=", "Bucket", "(", "id", "=", "bucket_id", ",", "location", "=", "location", ",", "default_storage_class", "=", "storage_class", ")", "db", ".", "session", ".", "add", "(", "bucket", ")", "db", ".", "session", ".", "commit", "(", ")" ]
Decorator helper that overrides django atomic decorator and automatically adds create revision .
def atomic(func):
    """Decorator helper that overrides django atomic decorator and
    automatically adds create revision when django-reversion is installed.
    """
    try:
        from reversion.revisions import create_revision
    except ImportError:
        # django-reversion not available: plain atomic is enough.
        return transaction.atomic(func)
    return transaction.atomic(create_revision()(func))
3,920
https://github.com/druids/django-chamber/blob/eef4169923557e96877a664fa254e8c0814f3f23/chamber/utils/transaction.py#L14-L23
[ "def", "add_cat", "(", "self", ",", "arg", "=", "None", ")", ":", "try", ":", "self", ".", "done_callback", "(", "self", ".", "cat", ")", "self", ".", "visible", "=", "False", "except", "Exception", "as", "e", ":", "self", ".", "validator", ".", "object", "=", "ICONS", "[", "'error'", "]", "raise", "e" ]
Atomic decorator with transaction signals .
def atomic_with_signals(func):
    """Atomic decorator with transaction signals and, when available,
    django-reversion revision creation.
    """
    try:
        from reversion.revisions import create_revision
    except ImportError:
        # django-reversion not available: atomic + signals only.
        return transaction.atomic(transaction_signals(func))
    return transaction.atomic(create_revision()(transaction_signals(func)))
3,921
https://github.com/druids/django-chamber/blob/eef4169923557e96877a664fa254e8c0814f3f23/chamber/utils/transaction.py#L116-L125
[ "def", "list", "(", "cls", ",", "datacenter", "=", "None", ",", "flavor", "=", "None", ",", "match", "=", "''", ",", "exact_match", "=", "False", ")", ":", "if", "not", "datacenter", ":", "dc_ids", "=", "[", "dc", "[", "'id'", "]", "for", "dc", "in", "Datacenter", ".", "filtered_list", "(", ")", "]", "kmap", "=", "{", "}", "for", "dc_id", "in", "dc_ids", ":", "vals", "=", "cls", ".", "safe_call", "(", "'hosting.disk.list_kernels'", ",", "dc_id", ")", "for", "key", "in", "vals", ":", "kmap", ".", "setdefault", "(", "key", ",", "[", "]", ")", ".", "extend", "(", "vals", ".", "get", "(", "key", ",", "[", "]", ")", ")", "# remove duplicates", "for", "key", "in", "kmap", ":", "kmap", "[", "key", "]", "=", "list", "(", "set", "(", "kmap", "[", "key", "]", ")", ")", "else", ":", "dc_id", "=", "Datacenter", ".", "usable_id", "(", "datacenter", ")", "kmap", "=", "cls", ".", "safe_call", "(", "'hosting.disk.list_kernels'", ",", "dc_id", ")", "if", "match", ":", "for", "flav", "in", "kmap", ":", "if", "exact_match", ":", "kmap", "[", "flav", "]", "=", "[", "x", "for", "x", "in", "kmap", "[", "flav", "]", "if", "match", "==", "x", "]", "else", ":", "kmap", "[", "flav", "]", "=", "[", "x", "for", "x", "in", "kmap", "[", "flav", "]", "if", "match", "in", "x", "]", "if", "flavor", ":", "if", "flavor", "not", "in", "kmap", ":", "cls", ".", "error", "(", "'flavor %s not supported here'", "%", "flavor", ")", "return", "dict", "(", "[", "(", "flavor", ",", "kmap", "[", "flavor", "]", ")", "]", ")", "return", "kmap" ]
Parse the XML file using the processor starting from the root of the document .
def parse_from_file ( root_processor , # type: RootProcessor xml_file_path , # type: Text encoding = 'utf-8' # type: Text ) : # type: (...) -> Any with open ( xml_file_path , 'r' , encoding = encoding ) as xml_file : xml_string = xml_file . read ( ) parsed_value = parse_from_string ( root_processor , xml_string ) return parsed_value
3,922
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L264-L284
[ "def", "queue_once_key", "(", "name", ",", "kwargs", ",", "restrict_to", "=", "None", ")", ":", "keys", "=", "[", "'qo'", ",", "force_string", "(", "name", ")", "]", "# Restrict to only the keys allowed in keys.", "if", "restrict_to", "is", "not", "None", ":", "restrict_kwargs", "=", "{", "key", ":", "kwargs", "[", "key", "]", "for", "key", "in", "restrict_to", "}", "keys", "+=", "kwargs_to_list", "(", "restrict_kwargs", ")", "else", ":", "keys", "+=", "kwargs_to_list", "(", "kwargs", ")", "key", "=", "\"_\"", ".", "join", "(", "keys", ")", "return", "key" ]
Parse the XML string using the processor starting from the root of the document .
def parse_from_string ( root_processor , # type: RootProcessor xml_string # type: Text ) : # type: (...) -> Any if not _is_valid_root_processor ( root_processor ) : raise InvalidRootProcessor ( 'Invalid root processor' ) parseable_xml_string = xml_string # type: Union[Text, bytes] if _PY2 and isinstance ( xml_string , Text ) : parseable_xml_string = xml_string . encode ( 'utf-8' ) root = ET . fromstring ( parseable_xml_string ) _xml_namespace_strip ( root ) state = _ProcessorState ( ) state . push_location ( root_processor . element_path ) return root_processor . parse_at_root ( root , state )
3,923
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L287-L311
[ "def", "update_ebounds", "(", "hdu_in", ",", "hdu", "=", "None", ")", ":", "if", "hdu", "is", "None", ":", "hdu", "=", "fits", ".", "BinTableHDU", "(", "data", "=", "hdu_in", ".", "data", ",", "header", "=", "hdu_in", ".", "header", ",", "name", "=", "hdu_in", ".", "name", ")", "else", ":", "for", "col", "in", "[", "'CHANNEL'", ",", "'E_MIN'", ",", "'E_MAX'", "]", ":", "if", "(", "hdu", ".", "data", "[", "col", "]", "!=", "hdu_in", ".", "data", "[", "col", "]", ")", ".", "any", "(", ")", ":", "raise", "ValueError", "(", "\"Energy bounds do not match : %s %s\"", "%", "(", "hdu", ".", "data", "[", "col", "]", ",", "hdu_in", ".", "data", "[", "col", "]", ")", ")", "return", "hdu" ]
Serialize the value to an XML file using the root processor .
def serialize_to_file ( root_processor , # type: RootProcessor value , # type: Any xml_file_path , # type: Text encoding = 'utf-8' , # type: Text indent = None # type: Optional[Text] ) : # type: (...) -> None serialized_value = serialize_to_string ( root_processor , value , indent ) with open ( xml_file_path , 'w' , encoding = encoding ) as xml_file : xml_file . write ( serialized_value )
3,924
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L314-L334
[ "def", "orng_type", "(", "self", ",", "table_name", ",", "col", ")", ":", "mysql_type", "=", "self", ".", "types", "[", "table_name", "]", "[", "col", "]", "n_vals", "=", "len", "(", "self", ".", "db", ".", "col_vals", "[", "table_name", "]", "[", "col", "]", ")", "if", "mysql_type", "in", "OrangeConverter", ".", "continuous_types", "or", "(", "n_vals", ">=", "50", "and", "mysql_type", "in", "OrangeConverter", ".", "integer_types", ")", ":", "return", "'c'", "elif", "mysql_type", "in", "OrangeConverter", ".", "ordinal_types", "+", "OrangeConverter", ".", "integer_types", ":", "return", "'d'", "else", ":", "return", "'string'" ]
Serialize the value to an XML string using the root processor .
def serialize_to_string ( root_processor , # type: RootProcessor value , # type: Any indent = None # type: Optional[Text] ) : # type: (...) -> Text if not _is_valid_root_processor ( root_processor ) : raise InvalidRootProcessor ( 'Invalid root processor' ) state = _ProcessorState ( ) state . push_location ( root_processor . element_path ) root = root_processor . serialize ( value , state ) state . pop_location ( ) # Always encode to UTF-8 because element tree does not support other # encodings in earlier Python versions. See: https://bugs.python.org/issue1767933 serialized_value = ET . tostring ( root , encoding = 'utf-8' ) # Since element tree does not support pretty printing XML, we use minidom to do the pretty # printing if indent : serialized_value = minidom . parseString ( serialized_value ) . toprettyxml ( indent = indent , encoding = 'utf-8' ) return serialized_value . decode ( 'utf-8' )
3,925
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L337-L371
[ "def", "find_best_frametype", "(", "channel", ",", "start", ",", "end", ",", "frametype_match", "=", "None", ",", "allow_tape", "=", "True", ",", "connection", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ")", ":", "try", ":", "return", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'error'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "except", "RuntimeError", ":", "# gaps (or something else went wrong)", "ftout", "=", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "return_all", "=", "True", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'ignore'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "try", ":", "if", "isinstance", "(", "ftout", ",", "dict", ")", ":", "return", "{", "key", ":", "ftout", "[", "key", "]", "[", "0", "]", "for", "key", "in", "ftout", "}", "return", "ftout", "[", "0", "]", "except", "IndexError", ":", "raise", "ValueError", "(", "\"Cannot find any valid frametypes for channel(s)\"", ")" ]
Create an array processor that can be used to parse and serialize array data .
def array ( item_processor , # type: Processor alias = None , # type: Optional[Text] nested = None , # type: Optional[Text] omit_empty = False , # type: bool hooks = None # type: Optional[Hooks] ) : # type: (...) -> RootProcessor processor = _Array ( item_processor , alias , nested , omit_empty ) return _processor_wrap_if_hooks ( processor , hooks )
3,926
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L374-L430
[ "def", "mbar_W_nk", "(", "u_kn", ",", "N_k", ",", "f_k", ")", ":", "return", "np", ".", "exp", "(", "mbar_log_W_nk", "(", "u_kn", ",", "N_k", ",", "f_k", ")", ")" ]
Create a processor for boolean values .
def boolean ( element_name , # type: Text attribute = None , # type: Optional[Text] required = True , # type: bool alias = None , # type: Optional[Text] default = False , # type: Optional[bool] omit_empty = False , # type: bool hooks = None # type: Optional[Hooks] ) : # type: (...) -> Processor return _PrimitiveValue ( element_name , _parse_boolean , attribute , required , alias , default , omit_empty , hooks )
3,927
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L433-L473
[ "def", "save_reg", "(", "data", ")", ":", "reg_dir", "=", "_reg_dir", "(", ")", "regfile", "=", "os", ".", "path", ".", "join", "(", "reg_dir", ",", "'register'", ")", "try", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "reg_dir", ")", ":", "os", ".", "makedirs", "(", "reg_dir", ")", "except", "OSError", "as", "exc", ":", "if", "exc", ".", "errno", "==", "errno", ".", "EEXIST", ":", "pass", "else", ":", "raise", "try", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "regfile", ",", "'a'", ")", "as", "fh_", ":", "salt", ".", "utils", ".", "msgpack", ".", "dump", "(", "data", ",", "fh_", ")", "except", "Exception", ":", "log", ".", "error", "(", "'Could not write to msgpack file %s'", ",", "__opts__", "[", "'outdir'", "]", ")", "raise" ]
Create a processor for dictionary values .
def dictionary ( element_name , # type: Text children , # type: List[Processor] required = True , # type: bool alias = None , # type: Optional[Text] hooks = None # type: Optional[Hooks] ) : # type: (...) -> RootProcessor processor = _Dictionary ( element_name , children , required , alias ) return _processor_wrap_if_hooks ( processor , hooks )
3,928
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L476-L499
[ "def", "check_imc_creds", "(", "auth", ",", "url", ")", ":", "test_url", "=", "'/imcrs'", "f_url", "=", "url", "+", "test_url", "try", ":", "response", "=", "requests", ".", "get", "(", "f_url", ",", "auth", "=", "auth", ",", "headers", "=", "HEADERS", ",", "verify", "=", "False", ")", "return", "bool", "(", "response", ".", "status_code", "==", "200", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "error", ":", "return", "\"Error:\\n\"", "+", "str", "(", "error", ")", "+", "\" test_imc_creds: An Error has occured\"" ]
Create a processor for floating point values .
def floating_point ( element_name , # type: Text attribute = None , # type: Optional[Text] required = True , # type: bool alias = None , # type: Optional[Text] default = 0.0 , # type: Optional[float] omit_empty = False , # type: bool hooks = None # type: Optional[Hooks] ) : # type: (...) -> Processor value_parser = _number_parser ( float ) return _PrimitiveValue ( element_name , value_parser , attribute , required , alias , default , omit_empty , hooks )
3,929
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L502-L527
[ "def", "compute_date_range_chunks", "(", "sessions", ",", "start_date", ",", "end_date", ",", "chunksize", ")", ":", "if", "start_date", "not", "in", "sessions", ":", "raise", "KeyError", "(", "\"Start date %s is not found in calendar.\"", "%", "(", "start_date", ".", "strftime", "(", "\"%Y-%m-%d\"", ")", ",", ")", ")", "if", "end_date", "not", "in", "sessions", ":", "raise", "KeyError", "(", "\"End date %s is not found in calendar.\"", "%", "(", "end_date", ".", "strftime", "(", "\"%Y-%m-%d\"", ")", ",", ")", ")", "if", "end_date", "<", "start_date", ":", "raise", "ValueError", "(", "\"End date %s cannot precede start date %s.\"", "%", "(", "end_date", ".", "strftime", "(", "\"%Y-%m-%d\"", ")", ",", "start_date", ".", "strftime", "(", "\"%Y-%m-%d\"", ")", ")", ")", "if", "chunksize", "is", "None", ":", "return", "[", "(", "start_date", ",", "end_date", ")", "]", "start_ix", ",", "end_ix", "=", "sessions", ".", "slice_locs", "(", "start_date", ",", "end_date", ")", "return", "(", "(", "r", "[", "0", "]", ",", "r", "[", "-", "1", "]", ")", "for", "r", "in", "partition_all", "(", "chunksize", ",", "sessions", "[", "start_ix", ":", "end_ix", "]", ")", ")" ]
Create a processor for integer values .
def integer ( element_name , # type: Text attribute = None , # type: Optional[Text] required = True , # type: bool alias = None , # type: Optional[Text] default = 0 , # type: Optional[int] omit_empty = False , # type: bool hooks = None # type: Optional[Hooks] ) : # type: (...) -> Processor value_parser = _number_parser ( int ) return _PrimitiveValue ( element_name , value_parser , attribute , required , alias , default , omit_empty , hooks )
3,930
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L530-L555
[ "def", "ostree_compose", "(", "self", ",", "release", ")", ":", "start", "=", "datetime", ".", "utcnow", "(", ")", "treefile", "=", "os", ".", "path", ".", "join", "(", "release", "[", "'git_dir'", "]", ",", "'treefile.json'", ")", "cmd", "=", "release", "[", "'ostree_compose'", "]", "%", "treefile", "with", "file", "(", "treefile", ",", "'w'", ")", "as", "tree", ":", "json", ".", "dump", "(", "release", "[", "'treefile'", "]", ",", "tree", ")", "# Only use new_chroot for the invocation, as --clean and --new-chroot are buggy together right now", "out", ",", "err", ",", "rcode", "=", "self", ".", "mock_chroot", "(", "release", ",", "cmd", ",", "new_chroot", "=", "True", ")", "ref", "=", "None", "commitid", "=", "None", "for", "line", "in", "out", ".", "split", "(", "'\\n'", ")", ":", "if", "' => '", "in", "line", ":", "# This line is the: ref => commitid line", "line", "=", "line", ".", "replace", "(", "'\\n'", ",", "''", ")", "ref", ",", "_", ",", "commitid", "=", "line", ".", "partition", "(", "' => '", ")", "self", ".", "log", ".", "info", "(", "'rpm-ostree compose complete (%s), ref %s, commitid %s'", ",", "datetime", ".", "utcnow", "(", ")", "-", "start", ",", "ref", ",", "commitid", ")", "return", "ref", ",", "commitid" ]
Create a processor for namedtuple values .
def named_tuple ( element_name , # type: Text tuple_type , # type: Type[Tuple] child_processors , # type: List[Processor] required = True , # type: bool alias = None , # type: Optional[Text] hooks = None # type: Optional[Hooks] ) : # type: (...) -> RootProcessor converter = _named_tuple_converter ( tuple_type ) processor = _Aggregate ( element_name , converter , child_processors , required , alias ) return _processor_wrap_if_hooks ( processor , hooks )
3,931
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L558-L576
[ "def", "is_labial", "(", "c", ",", "lang", ")", ":", "o", "=", "get_offset", "(", "c", ",", "lang", ")", "return", "(", "o", ">=", "LABIAL_RANGE", "[", "0", "]", "and", "o", "<=", "LABIAL_RANGE", "[", "1", "]", ")" ]
Create a processor for string values .
def string ( element_name , # type: Text attribute = None , # type: Optional[Text] required = True , # type: bool alias = None , # type: Optional[Text] default = '' , # type: Optional[Text] omit_empty = False , # type: bool strip_whitespace = True , # type: bool hooks = None # type: Optional[Hooks] ) : # type: (...) -> Processor value_parser = _string_parser ( strip_whitespace ) return _PrimitiveValue ( element_name , value_parser , attribute , required , alias , default , omit_empty , hooks )
3,932
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L579-L608
[ "def", "union", "(", "self", ",", "rdds", ")", ":", "first_jrdd_deserializer", "=", "rdds", "[", "0", "]", ".", "_jrdd_deserializer", "if", "any", "(", "x", ".", "_jrdd_deserializer", "!=", "first_jrdd_deserializer", "for", "x", "in", "rdds", ")", ":", "rdds", "=", "[", "x", ".", "_reserialize", "(", ")", "for", "x", "in", "rdds", "]", "cls", "=", "SparkContext", ".", "_jvm", ".", "org", ".", "apache", ".", "spark", ".", "api", ".", "java", ".", "JavaRDD", "jrdds", "=", "SparkContext", ".", "_gateway", ".", "new_array", "(", "cls", ",", "len", "(", "rdds", ")", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "rdds", ")", ")", ":", "jrdds", "[", "i", "]", "=", "rdds", "[", "i", "]", ".", "_jrdd", "return", "RDD", "(", "self", ".", "_jsc", ".", "union", "(", "jrdds", ")", ",", "self", ",", "rdds", "[", "0", "]", ".", "_jrdd_deserializer", ")" ]
Create a processor for user objects .
def user_object ( element_name , # type: Text cls , # type: Type[Any] child_processors , # type: List[Processor] required = True , # type: bool alias = None , # type: Optional[Text] hooks = None # type: Optional[Hooks] ) : # type: (...) -> RootProcessor converter = _user_object_converter ( cls ) processor = _Aggregate ( element_name , converter , child_processors , required , alias ) return _processor_wrap_if_hooks ( processor , hooks )
3,933
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L611-L629
[ "def", "run", "(", "wrapped", ")", ":", "@", "wraps", "(", "wrapped", ")", "def", "_run", "(", "self", ",", "query", ",", "bindings", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_reconnect_if_missing_connection", "(", ")", "start", "=", "time", ".", "time", "(", ")", "try", ":", "result", "=", "wrapped", "(", "self", ",", "query", ",", "bindings", ",", "*", "args", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "e", ":", "result", "=", "self", ".", "_try_again_if_caused_by_lost_connection", "(", "e", ",", "query", ",", "bindings", ",", "wrapped", ")", "t", "=", "self", ".", "_get_elapsed_time", "(", "start", ")", "self", ".", "log_query", "(", "query", ",", "bindings", ",", "t", ")", "return", "result", "return", "_run" ]
Append the list of element names as a path to the provided start element .
def _element_append_path ( start_element , # type: ET.Element element_names # type: Iterable[Text] ) : # type: (...) -> ET.Element end_element = start_element for element_name in element_names : new_element = ET . Element ( element_name ) end_element . append ( new_element ) end_element = new_element return end_element
3,934
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1390-L1406
[ "def", "remove_armor", "(", "armored_data", ")", ":", "stream", "=", "io", ".", "BytesIO", "(", "armored_data", ")", "lines", "=", "stream", ".", "readlines", "(", ")", "[", "3", ":", "-", "1", "]", "data", "=", "base64", ".", "b64decode", "(", "b''", ".", "join", "(", "lines", ")", ")", "payload", ",", "checksum", "=", "data", "[", ":", "-", "3", "]", ",", "data", "[", "-", "3", ":", "]", "assert", "util", ".", "crc24", "(", "payload", ")", "==", "checksum", "return", "payload" ]
Find the element specified by the given path starting from the root element of the document .
def _element_find_from_root ( root , # type: ET.Element element_path # type: Text ) : # type: (...) -> Optional[ET.Element] element = None element_names = element_path . split ( '/' ) if element_names [ 0 ] == root . tag : if len ( element_names ) > 1 : element = root . find ( '/' . join ( element_names [ 1 : ] ) ) else : element = root return element
3,935
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1409-L1429
[ "def", "make_random_models_table", "(", "n_sources", ",", "param_ranges", ",", "random_state", "=", "None", ")", ":", "prng", "=", "check_random_state", "(", "random_state", ")", "sources", "=", "Table", "(", ")", "for", "param_name", ",", "(", "lower", ",", "upper", ")", "in", "param_ranges", ".", "items", "(", ")", ":", "# Generate a column for every item in param_ranges, even if it", "# is not in the model (e.g. flux). However, such columns will", "# be ignored when rendering the image.", "sources", "[", "param_name", "]", "=", "prng", ".", "uniform", "(", "lower", ",", "upper", ",", "n_sources", ")", "return", "sources" ]
Ensure all elements specified in the given path relative to the provided parent element exist .
def _element_get_or_add_from_parent ( parent , # type: ET.Element element_path # type: Text ) : # type: (...) -> ET.Element element_names = element_path . split ( '/' ) # Starting from the parent, walk the element path until we find the first element in the path # that does not exist. Create that element and all the elements following it in the path. If # all elements along the path exist, then we will simply walk the full path to the final # element we want to return. existing_element = None previous_element = parent for i , element_name in enumerate ( element_names ) : existing_element = previous_element . find ( element_name ) if existing_element is None : existing_element = _element_append_path ( previous_element , element_names [ i : ] ) break previous_element = existing_element assert existing_element is not None return existing_element
3,936
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1432-L1460
[ "def", "refresh", "(", "self", ",", "refresh_token", ")", ":", "r", "=", "requests", ".", "post", "(", "self", ".", "apiurl", "+", "\"/token\"", ",", "params", "=", "{", "\"grant_type\"", ":", "\"refresh_token\"", ",", "\"client_id\"", ":", "self", ".", "cid", ",", "\"client_secret\"", ":", "self", ".", "csecret", ",", "\"refresh_token\"", ":", "refresh_token", "}", ")", "if", "r", ".", "status_code", "!=", "200", ":", "raise", "ServerError", "jsd", "=", "r", ".", "json", "(", ")", "return", "jsd", "[", "'access_token'", "]", ",", "int", "(", "jsd", "[", "'expires_in'", "]", ")", "+", "int", "(", "jsd", "[", "'created_at'", "]", ")" ]
Create an entirely new element path .
def _element_path_create_new ( element_path ) : # type: (Text) -> Tuple[ET.Element, ET.Element] element_names = element_path . split ( '/' ) start_element = ET . Element ( element_names [ 0 ] ) end_element = _element_append_path ( start_element , element_names [ 1 : ] ) return start_element , end_element
3,937
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1463-L1476
[ "def", "external_metadata", "(", "self", ",", "datasource_type", "=", "None", ",", "datasource_id", "=", "None", ")", ":", "if", "datasource_type", "==", "'druid'", ":", "datasource", "=", "ConnectorRegistry", ".", "get_datasource", "(", "datasource_type", ",", "datasource_id", ",", "db", ".", "session", ")", "elif", "datasource_type", "==", "'table'", ":", "database", "=", "(", "db", ".", "session", ".", "query", "(", "Database", ")", ".", "filter_by", "(", "id", "=", "request", ".", "args", ".", "get", "(", "'db_id'", ")", ")", ".", "one", "(", ")", ")", "Table", "=", "ConnectorRegistry", ".", "sources", "[", "'table'", "]", "datasource", "=", "Table", "(", "database", "=", "database", ",", "table_name", "=", "request", ".", "args", ".", "get", "(", "'table_name'", ")", ",", "schema", "=", "request", ".", "args", ".", "get", "(", "'schema'", ")", "or", "None", ",", ")", "external_metadata", "=", "datasource", ".", "external_metadata", "(", ")", "return", "self", ".", "json_response", "(", "external_metadata", ")" ]
Apply the after parse hook .
def _hooks_apply_after_parse ( hooks , # type: Optional[Hooks] state , # type: _ProcessorState value # type: Any ) : # type: (...) -> Any if hooks and hooks . after_parse : return hooks . after_parse ( ProcessorStateView ( state ) , value ) return value
3,938
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1479-L1489
[ "def", "_timestamp_regulator", "(", "self", ")", ":", "unified_timestamps", "=", "_PrettyDefaultDict", "(", "list", ")", "staged_files", "=", "self", ".", "_list_audio_files", "(", "sub_dir", "=", "\"staging\"", ")", "for", "timestamp_basename", "in", "self", ".", "__timestamps_unregulated", ":", "if", "len", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ">", "1", ":", "# File has been splitted", "timestamp_name", "=", "''", ".", "join", "(", "timestamp_basename", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "staged_splitted_files_of_timestamp", "=", "list", "(", "filter", "(", "lambda", "staged_file", ":", "(", "timestamp_name", "==", "staged_file", "[", ":", "-", "3", "]", "and", "all", "(", "[", "(", "x", "in", "set", "(", "map", "(", "str", ",", "range", "(", "10", ")", ")", ")", ")", "for", "x", "in", "staged_file", "[", "-", "3", ":", "]", "]", ")", ")", ",", "staged_files", ")", ")", "if", "len", "(", "staged_splitted_files_of_timestamp", ")", "==", "0", ":", "self", ".", "__errors", "[", "(", "time", "(", ")", ",", "timestamp_basename", ")", "]", "=", "{", "\"reason\"", ":", "\"Missing staged file\"", ",", "\"current_staged_files\"", ":", "staged_files", "}", "continue", "staged_splitted_files_of_timestamp", ".", "sort", "(", ")", "unified_timestamp", "=", "list", "(", ")", "for", "staging_digits", ",", "splitted_file", "in", "enumerate", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ":", "prev_splits_sec", "=", "0", "if", "int", "(", "staging_digits", ")", "!=", "0", ":", "prev_splits_sec", "=", "self", ".", "_get_audio_duration_seconds", "(", "\"{}/staging/{}{:03d}\"", ".", "format", "(", "self", ".", "src_dir", ",", "timestamp_name", ",", "staging_digits", "-", "1", ")", ")", "for", "word_block", "in", "splitted_file", ":", "unified_timestamp", ".", "append", "(", "_WordBlock", "(", "word", "=", "word_block", ".", "word", ",", "start", "=", "round", "(", 
"word_block", ".", "start", "+", "prev_splits_sec", ",", "2", ")", ",", "end", "=", "round", "(", "word_block", ".", "end", "+", "prev_splits_sec", ",", "2", ")", ")", ")", "unified_timestamps", "[", "str", "(", "timestamp_basename", ")", "]", "+=", "unified_timestamp", "else", ":", "unified_timestamps", "[", "timestamp_basename", "]", "+=", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", "[", "0", "]", "self", ".", "__timestamps", ".", "update", "(", "unified_timestamps", ")", "self", ".", "__timestamps_unregulated", "=", "_PrettyDefaultDict", "(", "list", ")" ]
Apply the before serialize hook .
def _hooks_apply_before_serialize ( hooks , # type: Optional[Hooks] state , # type: _ProcessorState value # type: Any ) : # type: (...) -> Any if hooks and hooks . before_serialize : return hooks . before_serialize ( ProcessorStateView ( state ) , value ) return value
3,939
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1492-L1502
[ "def", "fromtif", "(", "path", ",", "ext", "=", "'tif'", ",", "start", "=", "None", ",", "stop", "=", "None", ",", "recursive", "=", "False", ",", "nplanes", "=", "None", ",", "npartitions", "=", "None", ",", "labels", "=", "None", ",", "engine", "=", "None", ",", "credentials", "=", "None", ",", "discard_extra", "=", "False", ")", ":", "from", "tifffile", "import", "TiffFile", "if", "nplanes", "is", "not", "None", "and", "nplanes", "<=", "0", ":", "raise", "ValueError", "(", "'nplanes must be positive if passed, got %d'", "%", "nplanes", ")", "def", "getarray", "(", "idx_buffer_filename", ")", ":", "idx", ",", "buf", ",", "fname", "=", "idx_buffer_filename", "fbuf", "=", "BytesIO", "(", "buf", ")", "tfh", "=", "TiffFile", "(", "fbuf", ")", "ary", "=", "tfh", ".", "asarray", "(", ")", "pageCount", "=", "ary", ".", "shape", "[", "0", "]", "if", "nplanes", "is", "not", "None", ":", "extra", "=", "pageCount", "%", "nplanes", "if", "extra", ":", "if", "discard_extra", ":", "pageCount", "=", "pageCount", "-", "extra", "logging", ".", "getLogger", "(", "'thunder'", ")", ".", "warn", "(", "'Ignored %d pages in file %s'", "%", "(", "extra", ",", "fname", ")", ")", "else", ":", "raise", "ValueError", "(", "\"nplanes '%d' does not evenly divide '%d in file %s'\"", "%", "(", "nplanes", ",", "pageCount", ",", "fname", ")", ")", "values", "=", "[", "ary", "[", "i", ":", "(", "i", "+", "nplanes", ")", "]", "for", "i", "in", "range", "(", "0", ",", "pageCount", ",", "nplanes", ")", "]", "else", ":", "values", "=", "[", "ary", "]", "tfh", ".", "close", "(", ")", "if", "ary", ".", "ndim", "==", "3", ":", "values", "=", "[", "val", ".", "squeeze", "(", ")", "for", "val", "in", "values", "]", "nvals", "=", "len", "(", "values", ")", "keys", "=", "[", "(", "idx", "*", "nvals", "+", "timepoint", ",", ")", "for", "timepoint", "in", "range", "(", "nvals", ")", "]", "return", "zip", "(", "keys", ",", "values", ")", "recount", "=", "False", "if", "nplanes", "is", "None", "else", "True", 
"data", "=", "frompath", "(", "path", ",", "accessor", "=", "getarray", ",", "ext", "=", "ext", ",", "start", "=", "start", ",", "stop", "=", "stop", ",", "recursive", "=", "recursive", ",", "npartitions", "=", "npartitions", ",", "recount", "=", "recount", ",", "labels", "=", "labels", ",", "engine", "=", "engine", ",", "credentials", "=", "credentials", ")", "if", "engine", "is", "not", "None", "and", "npartitions", "is", "not", "None", "and", "data", ".", "npartitions", "(", ")", "<", "npartitions", ":", "data", "=", "data", ".", "repartition", "(", "npartitions", ")", "return", "data" ]
Return an _AggregateConverter for named tuples of the given type .
def _named_tuple_converter ( tuple_type ) : # type: (Type[Tuple]) -> _AggregateConverter def _from_dict ( dict_value ) : if dict_value : return tuple_type ( * * dict_value ) # Cannot construct a namedtuple value from an empty dictionary return None def _to_dict ( value ) : if value : return value . _asdict ( ) return { } converter = _AggregateConverter ( from_dict = _from_dict , to_dict = _to_dict ) return converter
3,940
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1511-L1528
[ "def", "set_content_length", "(", "self", ")", ":", "if", "not", "self", ".", "block_file", ":", "self", ".", "fields", "[", "'Content-Length'", "]", "=", "'0'", "return", "with", "wpull", ".", "util", ".", "reset_file_offset", "(", "self", ".", "block_file", ")", ":", "wpull", ".", "util", ".", "seek_file_end", "(", "self", ".", "block_file", ")", "self", ".", "fields", "[", "'Content-Length'", "]", "=", "str", "(", "self", ".", "block_file", ".", "tell", "(", ")", ")" ]
Return a function to parse numbers .
def _number_parser ( str_to_number_func ) : def _parse_number_value ( element_text , state ) : value = None try : value = str_to_number_func ( element_text ) except ( ValueError , TypeError ) : state . raise_error ( InvalidPrimitiveValue , 'Invalid numeric value "{}"' . format ( element_text ) ) return value return _parse_number_value
3,941
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1531-L1544
[ "def", "getStates", "(", "self", ")", ":", "self", ".", "bLvlNow", "=", "self", ".", "Rfree", "*", "self", ".", "aLvlNow", "self", ".", "mLvlNow", "=", "self", ".", "bLvlNow", "+", "self", ".", "eStateNow" ]
Parse the raw XML string as a boolean value .
def _parse_boolean ( element_text , state ) : value = None lowered_text = element_text . lower ( ) if lowered_text == 'true' : value = True elif lowered_text == 'false' : value = False else : state . raise_error ( InvalidPrimitiveValue , 'Invalid boolean value "{}"' . format ( element_text ) ) return value
3,942
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1547-L1559
[ "def", "find_best_frametype", "(", "channel", ",", "start", ",", "end", ",", "frametype_match", "=", "None", ",", "allow_tape", "=", "True", ",", "connection", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ")", ":", "try", ":", "return", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'error'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "except", "RuntimeError", ":", "# gaps (or something else went wrong)", "ftout", "=", "find_frametype", "(", "channel", ",", "gpstime", "=", "(", "start", ",", "end", ")", ",", "frametype_match", "=", "frametype_match", ",", "return_all", "=", "True", ",", "allow_tape", "=", "allow_tape", ",", "on_gaps", "=", "'ignore'", ",", "connection", "=", "connection", ",", "host", "=", "host", ",", "port", "=", "port", ")", "try", ":", "if", "isinstance", "(", "ftout", ",", "dict", ")", ":", "return", "{", "key", ":", "ftout", "[", "key", "]", "[", "0", "]", "for", "key", "in", "ftout", "}", "return", "ftout", "[", "0", "]", "except", "IndexError", ":", "raise", "ValueError", "(", "\"Cannot find any valid frametypes for channel(s)\"", ")" ]
Return a parser function for parsing string values .
def _string_parser ( strip_whitespace ) : def _parse_string_value ( element_text , _state ) : if element_text is None : value = '' elif strip_whitespace : value = element_text . strip ( ) else : value = element_text return value return _parse_string_value
3,943
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1574-L1586
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Return an _AggregateConverter for a user object of the given class .
def _user_object_converter ( cls ) : # type: (Type[Any]) -> _AggregateConverter def _from_dict ( dict_value ) : try : object_value = cls ( * * dict_value ) except TypeError : # Constructor does not support keyword arguments, try setting each # field individually. object_value = cls ( ) for field_name , field_value in dict_value . items ( ) : setattr ( object_value , field_name , field_value ) return object_value def _to_dict ( value ) : if value : return value . __dict__ return { } return _AggregateConverter ( from_dict = _from_dict , to_dict = _to_dict )
3,944
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1589-L1610
[ "def", "set_content_length", "(", "self", ")", ":", "if", "not", "self", ".", "block_file", ":", "self", ".", "fields", "[", "'Content-Length'", "]", "=", "'0'", "return", "with", "wpull", ".", "util", ".", "reset_file_offset", "(", "self", ".", "block_file", ")", ":", "wpull", ".", "util", ".", "seek_file_end", "(", "self", ".", "block_file", ")", "self", ".", "fields", "[", "'Content-Length'", "]", "=", "str", "(", "self", ".", "block_file", ".", "tell", "(", ")", ")" ]
Strip the XML namespace prefix from all element tags under the given root Element .
def _xml_namespace_strip ( root ) : # type: (ET.Element) -> None if '}' not in root . tag : return # Nothing to do, no namespace present for element in root . iter ( ) : if '}' in element . tag : element . tag = element . tag . split ( '}' ) [ 1 ] else : # pragma: no cover # We should never get here. If there is a namespace, then the namespace should be # included in all elements. pass
3,945
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1613-L1625
[ "def", "to_td", "(", "frame", ",", "name", ",", "con", ",", "if_exists", "=", "'fail'", ",", "time_col", "=", "None", ",", "time_index", "=", "None", ",", "index", "=", "True", ",", "index_label", "=", "None", ",", "chunksize", "=", "10000", ",", "date_format", "=", "None", ")", ":", "database", ",", "table", "=", "name", ".", "split", "(", "'.'", ")", "uploader", "=", "StreamingUploader", "(", "con", ".", "client", ",", "database", ",", "table", ",", "show_progress", "=", "True", ",", "clear_progress", "=", "True", ")", "uploader", ".", "message", "(", "'Streaming import into: {0}.{1}'", ".", "format", "(", "database", ",", "table", ")", ")", "# check existence", "if", "if_exists", "==", "'fail'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "RuntimeError", "(", "'table \"%s\" already exists'", "%", "name", ")", "elif", "if_exists", "==", "'replace'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "pass", "else", ":", "uploader", ".", "message", "(", "'deleting old table...'", ")", "con", ".", "client", ".", "delete_table", "(", "database", ",", "table", ")", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "elif", "if_exists", "==", "'append'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "ValueError", "(", "'invalid value 
for if_exists: %s'", "%", "if_exists", ")", "# \"time_index\" implies \"index=False\"", "if", "time_index", ":", "index", "=", "None", "# convert", "frame", "=", "frame", ".", "copy", "(", ")", "frame", "=", "_convert_time_column", "(", "frame", ",", "time_col", ",", "time_index", ")", "frame", "=", "_convert_index_column", "(", "frame", ",", "index", ",", "index_label", ")", "frame", "=", "_convert_date_format", "(", "frame", ",", "date_format", ")", "# upload", "uploader", ".", "upload_frame", "(", "frame", ",", "chunksize", ")", "uploader", ".", "wait_for_import", "(", "len", "(", "frame", ")", ")" ]
Parse the provided element as an aggregate .
def parse_at_element ( self , element , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any parsed_dict = self . _dictionary . parse_at_element ( element , state ) return self . _converter . from_dict ( parsed_dict )
3,946
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L678-L686
[ "def", "get_kafka_ssl_context", "(", ")", ":", "# NOTE: We assume that Kafka environment variables are present. If using", "# Apache Kafka on Heroku, they will be available in your app configuration.", "#", "# 1. Write the PEM certificates necessary for connecting to the Kafka brokers to physical", "# files. The broker connection SSL certs are passed in environment/config variables and", "# the python and ssl libraries require them in physical files. The public keys are written", "# to short lived NamedTemporaryFile files; the client key is encrypted before writing to", "# the short lived NamedTemporaryFile", "#", "# 2. Create and return an SSLContext for connecting to the Kafka brokers referencing the", "# PEM certificates written above", "#", "# stash the kafka certs in named temporary files for loading into SSLContext. Initialize the", "# SSLContext inside the with so when it goes out of scope the files are removed which has them", "# existing for the shortest amount of time. As extra caution password", "# protect/encrypt the client key", "with", "NamedTemporaryFile", "(", "suffix", "=", "'.crt'", ")", "as", "cert_file", ",", "NamedTemporaryFile", "(", "suffix", "=", "'.key'", ")", "as", "key_file", ",", "NamedTemporaryFile", "(", "suffix", "=", "'.crt'", ")", "as", "trust_file", ":", "cert_file", ".", "write", "(", "os", ".", "environ", "[", "'KAFKA_CLIENT_CERT'", "]", ".", "encode", "(", "'utf-8'", ")", ")", "cert_file", ".", "flush", "(", ")", "# setup cryptography to password encrypt/protect the client key so it's not in the clear on", "# the filesystem. 
Use the generated password in the call to load_cert_chain", "passwd", "=", "standard_b64encode", "(", "os", ".", "urandom", "(", "33", ")", ")", "private_key", "=", "serialization", ".", "load_pem_private_key", "(", "os", ".", "environ", "[", "'KAFKA_CLIENT_CERT_KEY'", "]", ".", "encode", "(", "'utf-8'", ")", ",", "password", "=", "None", ",", "backend", "=", "default_backend", "(", ")", ")", "pem", "=", "private_key", ".", "private_bytes", "(", "encoding", "=", "serialization", ".", "Encoding", ".", "PEM", ",", "format", "=", "serialization", ".", "PrivateFormat", ".", "PKCS8", ",", "encryption_algorithm", "=", "serialization", ".", "BestAvailableEncryption", "(", "passwd", ")", ")", "key_file", ".", "write", "(", "pem", ")", "key_file", ".", "flush", "(", ")", "trust_file", ".", "write", "(", "os", ".", "environ", "[", "'KAFKA_TRUSTED_CERT'", "]", ".", "encode", "(", "'utf-8'", ")", ")", "trust_file", ".", "flush", "(", ")", "# create an SSLContext for passing into the kafka provider using the create_default_context", "# function which creates an SSLContext with protocol set to PROTOCOL_SSLv23, OP_NO_SSLv2,", "# and OP_NO_SSLv3 when purpose=SERVER_AUTH.", "ssl_context", "=", "ssl", ".", "create_default_context", "(", "purpose", "=", "ssl", ".", "Purpose", ".", "SERVER_AUTH", ",", "cafile", "=", "trust_file", ".", "name", ")", "ssl_context", ".", "load_cert_chain", "(", "cert_file", ".", "name", ",", "keyfile", "=", "key_file", ".", "name", ",", "password", "=", "passwd", ")", "# Intentionally disabling hostname checking. The Kafka cluster runs in the cloud and Apache", "# Kafka on Heroku doesn't currently provide stable hostnames. We're pinned to a specific certificate", "# for this connection even though the certificate doesn't include host information. We rely", "# on the ca trust_cert for this purpose.", "ssl_context", ".", "check_hostname", "=", "False", "return", "ssl_context" ]
Parse the root XML element as an aggregate .
def parse_at_root ( self , root , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any parsed_dict = self . _dictionary . parse_at_root ( root , state ) return self . _converter . from_dict ( parsed_dict )
3,947
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L688-L696
[ "def", "alloc_retrieve_subnet_info", "(", "self", ",", "tenant_id", ",", "direc", ")", ":", "serv_obj", "=", "self", ".", "get_service_obj", "(", "tenant_id", ")", "subnet_dict", "=", "self", ".", "retrieve_dcnm_subnet_info", "(", "tenant_id", ",", "direc", ")", "if", "subnet_dict", ":", "return", "subnet_dict", "ip_subnet_dict", "=", "self", ".", "get_next_ip", "(", "tenant_id", ",", "direc", ")", "subnet_dict", "=", "self", ".", "fill_dcnm_subnet_info", "(", "tenant_id", ",", "ip_subnet_dict", ".", "get", "(", "'subnet'", ")", ",", "ip_subnet_dict", ".", "get", "(", "'start'", ")", ",", "ip_subnet_dict", ".", "get", "(", "'end'", ")", ",", "ip_subnet_dict", ".", "get", "(", "'gateway'", ")", ",", "ip_subnet_dict", ".", "get", "(", "'sec_gateway'", ")", ",", "direc", ")", "serv_obj", ".", "store_dcnm_subnet_dict", "(", "subnet_dict", ",", "direc", ")", "return", "subnet_dict" ]
Parse the aggregate from the provided parent XML element .
def parse_from_parent ( self , parent , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any parsed_dict = self . _dictionary . parse_from_parent ( parent , state ) return self . _converter . from_dict ( parsed_dict )
3,948
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L698-L706
[ "def", "prepare_notebook_context", "(", "request", ",", "notebook_context", ")", ":", "if", "not", "notebook_context", ":", "notebook_context", "=", "{", "}", "# Override notebook Jinja templates", "if", "\"extra_template_paths\"", "not", "in", "notebook_context", ":", "notebook_context", "[", "\"extra_template_paths\"", "]", "=", "[", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "\"server\"", ",", "\"templates\"", ")", "]", "# Furious invalid state follows if we let this slip through", "assert", "type", "(", "notebook_context", "[", "\"extra_template_paths\"", "]", ")", "==", "list", ",", "\"Got bad extra_template_paths {}\"", ".", "format", "(", "notebook_context", "[", "\"extra_template_paths\"", "]", ")", "# Jinja variables", "notebook_context", "[", "\"jinja_environment_options\"", "]", "=", "notebook_context", ".", "get", "(", "\"jinja_environment_options\"", ",", "{", "}", ")", "assert", "type", "(", "notebook_context", "[", "\"jinja_environment_options\"", "]", ")", "==", "dict", "# XXX: Following passing of global variables to Jinja templates requires Jinja 2.8.0dev+ version and is not yet supported", "# http://jinja.pocoo.org/docs/dev/api/#jinja2.Environment.globals", "# notebook_context[\"jinja_environment_options\"][\"globals\"] = notebook_context[\"jinja_environment_options\"].get(\"globals\", {})", "# globals_ = notebook_context[\"jinja_environment_options\"][\"globals\"]", "#", "# assert type(globals_) == dict", "#", "# if not \"home_url\" in globals_:", "# globals_[\"home_url\"] = request.host_url", "#", "# if not \"home_title\" in globals_:", "# globals_[\"home_title\"] = \"Back to site\"", "# Tell notebook to correctly address WebSockets allow origin policy", "notebook_context", "[", "\"allow_origin\"", "]", "=", "route_to_alt_domain", "(", "request", ",", "request", ".", "host_url", ")", "notebook_context", "[", "\"notebook_path\"", "]", "=", "request", ".", "route_path", "(", 
"\"notebook_proxy\"", ",", "remainder", "=", "\"\"", ")", "# Record the hash of the current parameters, so we know if this user accesses the notebook in this or different context", "if", "\"context_hash\"", "not", "in", "notebook_context", ":", "notebook_context", "[", "\"context_hash\"", "]", "=", "make_dict_hash", "(", "notebook_context", ")", "print", "(", "notebook_context", ")" ]
Serialize the value to a new element and returns the element .
def serialize ( self , value , # type: Any state # type: _ProcessorState ) : # type: (...) -> ET.Element dict_value = self . _converter . to_dict ( value ) return self . _dictionary . serialize ( dict_value , state )
3,949
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L708-L716
[ "def", "get", "(", "self", ",", "accountID", ",", "*", "*", "kwargs", ")", ":", "request", "=", "Request", "(", "'GET'", ",", "'/v3/accounts/{accountID}/pricing'", ")", "request", ".", "set_path_param", "(", "'accountID'", ",", "accountID", ")", "request", ".", "set_param", "(", "'instruments'", ",", "kwargs", ".", "get", "(", "'instruments'", ")", ")", "request", ".", "set_param", "(", "'since'", ",", "kwargs", ".", "get", "(", "'since'", ")", ")", "request", ".", "set_param", "(", "'includeUnitsAvailable'", ",", "kwargs", ".", "get", "(", "'includeUnitsAvailable'", ")", ")", "request", ".", "set_param", "(", "'includeHomeConversions'", ",", "kwargs", ".", "get", "(", "'includeHomeConversions'", ")", ")", "response", "=", "self", ".", "ctx", ".", "request", "(", "request", ")", "if", "response", ".", "content_type", "is", "None", ":", "return", "response", "if", "not", "response", ".", "content_type", ".", "startswith", "(", "\"application/json\"", ")", ":", "return", "response", "jbody", "=", "json", ".", "loads", "(", "response", ".", "raw_body", ")", "parsed_body", "=", "{", "}", "#", "# Parse responses as defined by the API specification", "#", "if", "str", "(", "response", ".", "status", ")", "==", "\"200\"", ":", "if", "jbody", ".", "get", "(", "'prices'", ")", "is", "not", "None", ":", "parsed_body", "[", "'prices'", "]", "=", "[", "self", ".", "ctx", ".", "pricing", ".", "ClientPrice", ".", "from_dict", "(", "d", ",", "self", ".", "ctx", ")", "for", "d", "in", "jbody", ".", "get", "(", "'prices'", ")", "]", "if", "jbody", ".", "get", "(", "'homeConversions'", ")", "is", "not", "None", ":", "parsed_body", "[", "'homeConversions'", "]", "=", "[", "self", ".", "ctx", ".", "pricing", ".", "HomeConversions", ".", "from_dict", "(", "d", ",", "self", ".", "ctx", ")", "for", "d", "in", "jbody", ".", "get", "(", "'homeConversions'", ")", "]", "if", "jbody", ".", "get", "(", "'time'", ")", "is", "not", "None", ":", "parsed_body", "[", "'time'", "]", "=", "jbody", 
".", "get", "(", "'time'", ")", "elif", "str", "(", "response", ".", "status", ")", "==", "\"400\"", ":", "if", "jbody", ".", "get", "(", "'errorCode'", ")", "is", "not", "None", ":", "parsed_body", "[", "'errorCode'", "]", "=", "jbody", ".", "get", "(", "'errorCode'", ")", "if", "jbody", ".", "get", "(", "'errorMessage'", ")", "is", "not", "None", ":", "parsed_body", "[", "'errorMessage'", "]", "=", "jbody", ".", "get", "(", "'errorMessage'", ")", "elif", "str", "(", "response", ".", "status", ")", "==", "\"401\"", ":", "if", "jbody", ".", "get", "(", "'errorCode'", ")", "is", "not", "None", ":", "parsed_body", "[", "'errorCode'", "]", "=", "jbody", ".", "get", "(", "'errorCode'", ")", "if", "jbody", ".", "get", "(", "'errorMessage'", ")", "is", "not", "None", ":", "parsed_body", "[", "'errorMessage'", "]", "=", "jbody", ".", "get", "(", "'errorMessage'", ")", "elif", "str", "(", "response", ".", "status", ")", "==", "\"404\"", ":", "if", "jbody", ".", "get", "(", "'errorCode'", ")", "is", "not", "None", ":", "parsed_body", "[", "'errorCode'", "]", "=", "jbody", ".", "get", "(", "'errorCode'", ")", "if", "jbody", ".", "get", "(", "'errorMessage'", ")", "is", "not", "None", ":", "parsed_body", "[", "'errorMessage'", "]", "=", "jbody", ".", "get", "(", "'errorMessage'", ")", "elif", "str", "(", "response", ".", "status", ")", "==", "\"405\"", ":", "if", "jbody", ".", "get", "(", "'errorCode'", ")", "is", "not", "None", ":", "parsed_body", "[", "'errorCode'", "]", "=", "jbody", ".", "get", "(", "'errorCode'", ")", "if", "jbody", ".", "get", "(", "'errorMessage'", ")", "is", "not", "None", ":", "parsed_body", "[", "'errorMessage'", "]", "=", "jbody", ".", "get", "(", "'errorMessage'", ")", "#", "# Unexpected response status", "#", "else", ":", "parsed_body", "=", "jbody", "response", ".", "body", "=", "parsed_body", "return", "response" ]
Serialize the value and adds it to the parent .
def serialize_on_parent ( self , parent , # type: ET.Element value , # type: Any state # type: _ProcessorState ) : # type: (...) -> None dict_value = self . _converter . to_dict ( value ) self . _dictionary . serialize_on_parent ( parent , dict_value , state )
3,950
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L718-L727
[ "def", "get_email_link", "(", "application", ")", ":", "# don't use secret_token unless we have to", "if", "(", "application", ".", "content_type", ".", "model", "==", "'person'", "and", "application", ".", "applicant", ".", "has_usable_password", "(", ")", ")", ":", "url", "=", "'%s/applications/%d/'", "%", "(", "settings", ".", "REGISTRATION_BASE_URL", ",", "application", ".", "pk", ")", "is_secret", "=", "False", "else", ":", "url", "=", "'%s/applications/%s/'", "%", "(", "settings", ".", "REGISTRATION_BASE_URL", ",", "application", ".", "secret_token", ")", "is_secret", "=", "True", "return", "url", ",", "is_secret" ]
Parse the provided element as an array .
def parse_at_element ( self , element , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any item_iter = element . findall ( self . _item_processor . element_path ) return self . _parse ( item_iter , state )
3,951
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L784-L792
[ "def", "_control", "(", "self", ",", "state", ")", ":", "# Renew subscription if necessary", "if", "not", "self", ".", "_subscription_is_recent", "(", ")", ":", "self", ".", "_subscribe", "(", ")", "cmd", "=", "MAGIC", "+", "CONTROL", "+", "self", ".", "_mac", "+", "PADDING_1", "+", "PADDING_2", "+", "state", "_LOGGER", ".", "debug", "(", "\"Sending new state to %s: %s\"", ",", "self", ".", "host", ",", "ord", "(", "state", ")", ")", "ack_state", "=", "self", ".", "_udp_transact", "(", "cmd", ",", "self", ".", "_control_resp", ",", "state", ")", "if", "ack_state", "is", "None", ":", "raise", "S20Exception", "(", "\"Device didn't acknowledge control request: {}\"", ".", "format", "(", "self", ".", "host", ")", ")" ]
Parse the root XML element as an array .
def parse_at_root ( self , root , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any if not self . _nested : raise InvalidRootProcessor ( 'Non-nested array "{}" cannot be root element' . format ( self . alias ) ) parsed_array = [ ] # type: List array_element = _element_find_from_root ( root , self . _nested ) if array_element is not None : parsed_array = self . parse_at_element ( array_element , state ) elif self . required : raise MissingValue ( 'Missing required array at root: "{}"' . format ( self . _nested ) ) return parsed_array
3,952
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L794-L813
[ "def", "set_temperature", "(", "self", ",", "zone", ",", "temperature", ",", "until", "=", "None", ")", ":", "if", "until", "is", "None", ":", "data", "=", "{", "\"Value\"", ":", "temperature", ",", "\"Status\"", ":", "\"Hold\"", ",", "\"NextTime\"", ":", "None", "}", "else", ":", "data", "=", "{", "\"Value\"", ":", "temperature", ",", "\"Status\"", ":", "\"Temporary\"", ",", "\"NextTime\"", ":", "until", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%SZ'", ")", "}", "self", ".", "_set_heat_setpoint", "(", "zone", ",", "data", ")" ]
Parse the array data from the provided parent XML element .
def parse_from_parent ( self , parent , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any item_iter = parent . findall ( self . _item_path ) return self . _parse ( item_iter , state )
3,953
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L815-L823
[ "def", "_throttle_error", "(", "self", ",", "msg", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "now", "=", "time", ".", "time", "(", ")", "if", "msg", "in", "self", ".", "_errors", ":", "if", "(", "(", "now", "-", "self", ".", "_errors", "[", "msg", "]", ")", ">=", "self", ".", "server_error_interval", ")", ":", "fn", "=", "self", ".", "log", ".", "error", "self", ".", "_errors", "[", "msg", "]", "=", "now", "else", ":", "fn", "=", "self", ".", "log", ".", "debug", "else", ":", "self", ".", "_errors", "[", "msg", "]", "=", "now", "fn", "=", "self", ".", "log", ".", "error", "return", "fn", "(", "msg", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Serialize the value into a new Element object and return it .
def serialize ( self , value , # type: Any state # type: _ProcessorState ) : # type: (...) -> ET.Element if self . _nested is None : state . raise_error ( InvalidRootProcessor , 'Cannot directly serialize a non-nested array "{}"' . format ( self . alias ) ) if not value and self . required : state . raise_error ( MissingValue , 'Missing required array: "{}"' . format ( self . alias ) ) start_element , end_element = _element_path_create_new ( self . _nested ) self . _serialize ( end_element , value , state ) return start_element
3,954
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L825-L844
[ "def", "available_repositories", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "'data'", "not", "in", "kwargs", ":", "kwargs", "[", "'data'", "]", "=", "dict", "(", ")", "kwargs", "[", "'data'", "]", "[", "'product_id'", "]", "=", "self", ".", "product", ".", "id", "kwargs", "=", "kwargs", ".", "copy", "(", ")", "# shadow the passed-in kwargs", "kwargs", ".", "update", "(", "self", ".", "_server_config", ".", "get_client_kwargs", "(", ")", ")", "response", "=", "client", ".", "get", "(", "self", ".", "path", "(", "'available_repositories'", ")", ",", "*", "*", "kwargs", ")", "return", "_handle_response", "(", "response", ",", "self", ".", "_server_config", ")" ]
Serialize the value and append it to the parent element .
def serialize_on_parent ( self , parent , # type: ET.Element value , # type: Any state # type: _ProcessorState ) : # type: (...) -> None if not value and self . required : state . raise_error ( MissingValue , 'Missing required array: "{}"' . format ( self . alias ) ) if not value and self . omit_empty : return # Do nothing if self . _nested is not None : array_parent = _element_get_or_add_from_parent ( parent , self . _nested ) else : # Embedded array has all items serialized directly on the parent. array_parent = parent self . _serialize ( array_parent , value , state )
3,955
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L846-L867
[ "def", "is_registration_possible", "(", "self", ",", "user_info", ")", ":", "return", "self", ".", "get_accessibility", "(", ")", ".", "is_open", "(", ")", "and", "self", ".", "_registration", ".", "is_open", "(", ")", "and", "self", ".", "is_user_accepted_by_access_control", "(", "user_info", ")" ]
Parse the array data using the provided iterator of XML elements .
def _parse ( self , item_iter , # type: Iterable[ET.Element] state # type: _ProcessorState ) : # type: (...) -> List parsed_array = [ ] for i , item in enumerate ( item_iter ) : state . push_location ( self . _item_processor . element_path , i ) parsed_array . append ( self . _item_processor . parse_at_element ( item , state ) ) state . pop_location ( ) if not parsed_array and self . required : state . raise_error ( MissingValue , 'Missing required array "{}"' . format ( self . alias ) ) return parsed_array
3,956
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L869-L886
[ "def", "Modify", "(", "self", ",", "client_limit", "=", "None", ",", "client_rate", "=", "None", ",", "duration", "=", "None", ")", ":", "args", "=", "hunt_pb2", ".", "ApiModifyHuntArgs", "(", "hunt_id", "=", "self", ".", "hunt_id", ")", "if", "client_limit", "is", "not", "None", ":", "args", ".", "client_limit", "=", "client_limit", "if", "client_rate", "is", "not", "None", ":", "args", ".", "client_rate", "=", "client_rate", "if", "duration", "is", "not", "None", ":", "args", ".", "duration", "=", "duration", "data", "=", "self", ".", "_context", ".", "SendRequest", "(", "\"ModifyHunt\"", ",", "args", ")", "return", "Hunt", "(", "data", "=", "data", ",", "context", "=", "self", ".", "_context", ")" ]
Serialize the array value and add it to the array parent element .
def _serialize ( self , array_parent , # type: ET.Element value , # type: List state # type: _ProcessorState ) : # type: (...) -> None if not value : # Nothing to do. Avoid attempting to iterate over a possibly # None value. return for i , item_value in enumerate ( value ) : state . push_location ( self . _item_processor . element_path , i ) item_element = self . _item_processor . serialize ( item_value , state ) array_parent . append ( item_element ) state . pop_location ( )
3,957
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L888-L905
[ "def", "_connect", "(", "self", ")", ":", "try", ":", "# Open Connection", "self", ".", "influx", "=", "InfluxDBClient", "(", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "username", ",", "self", ".", "password", ",", "self", ".", "database", ",", "self", ".", "ssl", ")", "# Log", "self", ".", "log", ".", "debug", "(", "\"InfluxdbHandler: Established connection to \"", "\"%s:%d/%s.\"", ",", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "database", ")", "except", "Exception", "as", "ex", ":", "# Log Error", "self", ".", "_throttle_error", "(", "\"InfluxdbHandler: Failed to connect to \"", "\"%s:%d/%s. %s\"", ",", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "database", ",", "ex", ")", "# Close Socket", "self", ".", "_close", "(", ")", "return" ]
Parse the provided element as a dictionary .
def parse_at_element ( self , element , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any parsed_dict = { } for child in self . _child_processors : state . push_location ( child . element_path ) parsed_dict [ child . alias ] = child . parse_from_parent ( element , state ) state . pop_location ( ) return parsed_dict
3,958
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L945-L959
[ "def", "color_lerp", "(", "c1", ":", "Tuple", "[", "int", ",", "int", ",", "int", "]", ",", "c2", ":", "Tuple", "[", "int", ",", "int", ",", "int", "]", ",", "a", ":", "float", ")", "->", "Color", ":", "return", "Color", ".", "_new_from_cdata", "(", "lib", ".", "TCOD_color_lerp", "(", "c1", ",", "c2", ",", "a", ")", ")" ]
Parse the root XML element as a dictionary .
def parse_at_root ( self , root , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any parsed_dict = { } # type: Dict dict_element = _element_find_from_root ( root , self . element_path ) if dict_element is not None : parsed_dict = self . parse_at_element ( dict_element , state ) elif self . required : raise MissingValue ( 'Missing required root aggregate "{}"' . format ( self . element_path ) ) return parsed_dict
3,959
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L961-L976
[ "def", "set_temperature", "(", "self", ",", "zone", ",", "temperature", ",", "until", "=", "None", ")", ":", "if", "until", "is", "None", ":", "data", "=", "{", "\"Value\"", ":", "temperature", ",", "\"Status\"", ":", "\"Hold\"", ",", "\"NextTime\"", ":", "None", "}", "else", ":", "data", "=", "{", "\"Value\"", ":", "temperature", ",", "\"Status\"", ":", "\"Temporary\"", ",", "\"NextTime\"", ":", "until", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%SZ'", ")", "}", "self", ".", "_set_heat_setpoint", "(", "zone", ",", "data", ")" ]
Serialize the value to a new element and return the element .
def serialize ( self , value , # type: Any state # type: _ProcessorState ) : # type: (...) -> ET.Element if not value and self . required : state . raise_error ( MissingValue , 'Missing required aggregate "{}"' . format ( self . element_path ) ) start_element , end_element = _element_path_create_new ( self . element_path ) self . _serialize ( end_element , value , state ) return start_element
3,960
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L996-L1010
[ "def", "available_repositories", "(", "self", ",", "*", "*", "kwargs", ")", ":", "if", "'data'", "not", "in", "kwargs", ":", "kwargs", "[", "'data'", "]", "=", "dict", "(", ")", "kwargs", "[", "'data'", "]", "[", "'product_id'", "]", "=", "self", ".", "product", ".", "id", "kwargs", "=", "kwargs", ".", "copy", "(", ")", "# shadow the passed-in kwargs", "kwargs", ".", "update", "(", "self", ".", "_server_config", ".", "get_client_kwargs", "(", ")", ")", "response", "=", "client", ".", "get", "(", "self", ".", "path", "(", "'available_repositories'", ")", ",", "*", "*", "kwargs", ")", "return", "_handle_response", "(", "response", ",", "self", ".", "_server_config", ")" ]
Serialize the dictionary and append all serialized children to the element .
def _serialize ( self , element , # type: ET.Element value , # type: Dict state # type: _ProcessorState ) : # type: (...) -> None for child in self . _child_processors : state . push_location ( child . element_path ) child_value = value . get ( child . alias ) child . serialize_on_parent ( element , child_value , state ) state . pop_location ( )
3,961
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1030-L1042
[ "def", "_get_regional_term", "(", "self", ",", "C", ",", "imt", ",", "vs30", ",", "rrup", ")", ":", "f3", "=", "interpolate", ".", "interp1d", "(", "[", "150", ",", "250", ",", "350", ",", "450", ",", "600", ",", "850", ",", "1150", ",", "2000", "]", ",", "[", "C", "[", "'a36'", "]", ",", "C", "[", "'a37'", "]", ",", "C", "[", "'a38'", "]", ",", "C", "[", "'a39'", "]", ",", "C", "[", "'a40'", "]", ",", "C", "[", "'a41'", "]", ",", "C", "[", "'a42'", "]", ",", "C", "[", "'a42'", "]", "]", ",", "kind", "=", "'linear'", ")", "return", "f3", "(", "vs30", ")", "+", "C", "[", "'a29'", "]", "*", "rrup" ]
Parse the given element .
def parse_at_element ( self , element , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any xml_value = self . _processor . parse_at_element ( element , state ) return _hooks_apply_after_parse ( self . _hooks , state , xml_value )
3,962
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1078-L1086
[ "def", "_get_manifest_list", "(", "self", ",", "image", ")", ":", "if", "image", "in", "self", ".", "manifest_list_cache", ":", "return", "self", ".", "manifest_list_cache", "[", "image", "]", "manifest_list", "=", "get_manifest_list", "(", "image", ",", "image", ".", "registry", ",", "insecure", "=", "self", ".", "parent_registry_insecure", ",", "dockercfg_path", "=", "self", ".", "parent_registry_dockercfg_path", ")", "if", "'@sha256:'", "in", "str", "(", "image", ")", "and", "not", "manifest_list", ":", "# we want to adjust the tag only for manifest list fetching", "image", "=", "image", ".", "copy", "(", ")", "try", ":", "config_blob", "=", "get_config_from_registry", "(", "image", ",", "image", ".", "registry", ",", "image", ".", "tag", ",", "insecure", "=", "self", ".", "parent_registry_insecure", ",", "dockercfg_path", "=", "self", ".", "parent_registry_dockercfg_path", ")", "except", "(", "HTTPError", ",", "RetryError", ",", "Timeout", ")", "as", "ex", ":", "self", ".", "log", ".", "warning", "(", "'Unable to fetch config for %s, got error %s'", ",", "image", ",", "ex", ".", "response", ".", "status_code", ")", "raise", "RuntimeError", "(", "'Unable to fetch config for base image'", ")", "release", "=", "config_blob", "[", "'config'", "]", "[", "'Labels'", "]", "[", "'release'", "]", "version", "=", "config_blob", "[", "'config'", "]", "[", "'Labels'", "]", "[", "'version'", "]", "docker_tag", "=", "\"%s-%s\"", "%", "(", "version", ",", "release", ")", "image", ".", "tag", "=", "docker_tag", "manifest_list", "=", "get_manifest_list", "(", "image", ",", "image", ".", "registry", ",", "insecure", "=", "self", ".", "parent_registry_insecure", ",", "dockercfg_path", "=", "self", ".", "parent_registry_dockercfg_path", ")", "self", ".", "manifest_list_cache", "[", "image", "]", "=", "manifest_list", "return", "self", ".", "manifest_list_cache", "[", "image", "]" ]
Parse the given element as the root of the document .
def parse_at_root ( self , root , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any xml_value = self . _processor . parse_at_root ( root , state ) return _hooks_apply_after_parse ( self . _hooks , state , xml_value )
3,963
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1088-L1096
[ "def", "_requires_refresh_token", "(", "self", ")", ":", "expires_on", "=", "datetime", ".", "datetime", ".", "strptime", "(", "self", ".", "login_data", "[", "'token'", "]", "[", "'expiresOn'", "]", ",", "'%Y-%m-%dT%H:%M:%SZ'", ")", "refresh", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "30", ")", "return", "expires_on", "<", "refresh" ]
Parse the element from the given parent element .
def parse_from_parent ( self , parent , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any xml_value = self . _processor . parse_from_parent ( parent , state ) return _hooks_apply_after_parse ( self . _hooks , state , xml_value )
3,964
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1098-L1106
[ "def", "create_token_response", "(", "self", ",", "request", ",", "token_handler", ")", ":", "headers", "=", "self", ".", "_get_default_headers", "(", ")", "try", ":", "if", "self", ".", "request_validator", ".", "client_authentication_required", "(", "request", ")", ":", "log", ".", "debug", "(", "'Authenticating client, %r.'", ",", "request", ")", "if", "not", "self", ".", "request_validator", ".", "authenticate_client", "(", "request", ")", ":", "log", ".", "debug", "(", "'Client authentication failed, %r.'", ",", "request", ")", "raise", "errors", ".", "InvalidClientError", "(", "request", "=", "request", ")", "elif", "not", "self", ".", "request_validator", ".", "authenticate_client_id", "(", "request", ".", "client_id", ",", "request", ")", ":", "log", ".", "debug", "(", "'Client authentication failed, %r.'", ",", "request", ")", "raise", "errors", ".", "InvalidClientError", "(", "request", "=", "request", ")", "log", ".", "debug", "(", "'Validating access token request, %r.'", ",", "request", ")", "self", ".", "validate_token_request", "(", "request", ")", "except", "errors", ".", "OAuth2Error", "as", "e", ":", "log", ".", "debug", "(", "'Client error in token request, %s.'", ",", "e", ")", "headers", ".", "update", "(", "e", ".", "headers", ")", "return", "headers", ",", "e", ".", "json", ",", "e", ".", "status_code", "token", "=", "token_handler", ".", "create_token", "(", "request", ",", "self", ".", "refresh_token", ")", "for", "modifier", "in", "self", ".", "_token_modifiers", ":", "token", "=", "modifier", "(", "token", ")", "self", ".", "request_validator", ".", "save_token", "(", "token", ",", "request", ")", "log", ".", "debug", "(", "'Issuing token %r to client id %r (%r) and username %s.'", ",", "token", ",", "request", ".", "client_id", ",", "request", ".", "client", ",", "request", ".", "username", ")", "return", "headers", ",", "json", ".", "dumps", "(", "token", ")", ",", "200" ]
Serialize the value and returns it .
def serialize ( self , value , # type: Any state # type: _ProcessorState ) : # type: (...) -> ET.Element xml_value = _hooks_apply_before_serialize ( self . _hooks , state , value ) return self . _processor . serialize ( xml_value , state )
3,965
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1108-L1116
[ "def", "onboarding_message", "(", "*", "*", "payload", ")", ":", "# Get WebClient so you can communicate back to Slack.", "web_client", "=", "payload", "[", "\"web_client\"", "]", "# Get the id of the Slack user associated with the incoming event", "user_id", "=", "payload", "[", "\"data\"", "]", "[", "\"user\"", "]", "[", "\"id\"", "]", "# Open a DM with the new user.", "response", "=", "web_client", ".", "im_open", "(", "user_id", ")", "channel", "=", "response", "[", "\"channel\"", "]", "[", "\"id\"", "]", "# Post the onboarding message.", "start_onboarding", "(", "web_client", ",", "user_id", ",", "channel", ")" ]
Serialize the value directory on the parent .
def serialize_on_parent ( self , parent , # type: ET.Element value , # type: Any state # type: _ProcessorState ) : # type: (...) -> None xml_value = _hooks_apply_before_serialize ( self . _hooks , state , value ) self . _processor . serialize_on_parent ( parent , xml_value , state )
3,966
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1118-L1127
[ "def", "trace_integration", "(", "tracer", "=", "None", ")", ":", "log", ".", "info", "(", "'Integrated module: {}'", ".", "format", "(", "MODULE_NAME", ")", ")", "# Wrap the httplib request function", "request_func", "=", "getattr", "(", "httplib", ".", "HTTPConnection", ",", "HTTPLIB_REQUEST_FUNC", ")", "wrapped_request", "=", "wrap_httplib_request", "(", "request_func", ")", "setattr", "(", "httplib", ".", "HTTPConnection", ",", "request_func", ".", "__name__", ",", "wrapped_request", ")", "# Wrap the httplib response function", "response_func", "=", "getattr", "(", "httplib", ".", "HTTPConnection", ",", "HTTPLIB_RESPONSE_FUNC", ")", "wrapped_response", "=", "wrap_httplib_response", "(", "response_func", ")", "setattr", "(", "httplib", ".", "HTTPConnection", ",", "response_func", ".", "__name__", ",", "wrapped_response", ")" ]
Parse the primitive value at the XML element .
def parse_at_element ( self , element , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any if self . _attribute : parsed_value = self . _parse_attribute ( element , self . _attribute , state ) else : parsed_value = self . _parser_func ( element . text , state ) return _hooks_apply_after_parse ( self . _hooks , state , parsed_value )
3,967
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1202-L1214
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "cols", "=", "salt", ".", "utils", ".", "args", ".", "shlex_split", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ")", ".", "strip", "(", ")", ")", "if", "repo", "not", "in", "cols", ":", "output", ".", "append", "(", "line", ")", "else", ":", "output", ".", "append", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "repostr", "+", "'\\n'", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ",", "'w'", ")", "as", "fhandle", ":", "fhandle", ".", "writelines", "(", "output", ")" ]
Parse the primitive value under the parent XML element .
def parse_from_parent ( self , parent , # type: ET.Element state # type: _ProcessorState ) : # type: (...) -> Any element = parent . find ( self . element_path ) if element is None and self . required : state . raise_error ( MissingValue , 'Missing required element "{}"' . format ( self . element_path ) ) elif element is not None : return self . parse_at_element ( element , state ) return _hooks_apply_after_parse ( self . _hooks , state , self . _default )
3,968
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1216-L1232
[ "def", "_mod_repo_in_file", "(", "repo", ",", "repostr", ",", "filepath", ")", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ")", "as", "fhandle", ":", "output", "=", "[", "]", "for", "line", "in", "fhandle", ":", "cols", "=", "salt", ".", "utils", ".", "args", ".", "shlex_split", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_unicode", "(", "line", ")", ".", "strip", "(", ")", ")", "if", "repo", "not", "in", "cols", ":", "output", ".", "append", "(", "line", ")", "else", ":", "output", ".", "append", "(", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "repostr", "+", "'\\n'", ")", ")", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "filepath", ",", "'w'", ")", "as", "fhandle", ":", "fhandle", ".", "writelines", "(", "output", ")" ]
Serialize the value into a new element object and return the element .
def serialize ( self , value , # type: Any state # type: _ProcessorState ) : # type: (...) -> ET.Element # For primitive values, this is only called when the value is part of an array, # in which case we do not need to check for missing or omitted values. start_element , end_element = _element_path_create_new ( self . element_path ) self . _serialize ( end_element , value , state ) return start_element
3,969
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1234-L1249
[ "def", "get_condarc_channels", "(", "self", ",", "normalize", "=", "False", ",", "conda_url", "=", "'https://conda.anaconda.org'", ",", "channels", "=", "None", ")", ":", "# https://docs.continuum.io/anaconda-repository/configuration", "# They can only exist on a system condarc", "default_channels", "=", "self", ".", "load_rc", "(", "system", "=", "True", ")", ".", "get", "(", "'default_channels'", ",", "self", ".", "DEFAULT_CHANNELS", ")", "normalized_channels", "=", "[", "]", "if", "channels", "is", "None", ":", "condarc", "=", "self", ".", "load_rc", "(", ")", "channels", "=", "condarc", ".", "get", "(", "'channels'", ")", "if", "channels", "is", "None", ":", "channels", "=", "[", "'defaults'", "]", "if", "normalize", ":", "template", "=", "'{0}/{1}'", "if", "conda_url", "[", "-", "1", "]", "!=", "'/'", "else", "'{0}{1}'", "for", "channel", "in", "channels", ":", "if", "channel", "==", "'defaults'", ":", "normalized_channels", "+=", "default_channels", "elif", "channel", ".", "startswith", "(", "'http'", ")", ":", "normalized_channels", ".", "append", "(", "channel", ")", "else", ":", "# Append to the conda_url that comes from anaconda client", "# default_channel_alias key is deliberately ignored", "normalized_channels", ".", "append", "(", "template", ".", "format", "(", "conda_url", ",", "channel", ")", ")", "channels", "=", "normalized_channels", "return", "channels" ]
Serialize the value and add it to the parent element .
def serialize_on_parent ( self , parent , # type: ET.Element value , # type: Any state # type: _ProcessorState ) : # type: (...) -> None # Note that falsey values are not treated as missing, but they may be omitted. if value is None and self . required : state . raise_error ( MissingValue , self . _missing_value_message ( parent ) ) if not value and self . omit_empty : return # Do Nothing element = _element_get_or_add_from_parent ( parent , self . element_path ) self . _serialize ( element , value , state )
3,970
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1251-L1267
[ "def", "get_email_link", "(", "application", ")", ":", "# don't use secret_token unless we have to", "if", "(", "application", ".", "content_type", ".", "model", "==", "'person'", "and", "application", ".", "applicant", ".", "has_usable_password", "(", ")", ")", ":", "url", "=", "'%s/applications/%d/'", "%", "(", "settings", ".", "REGISTRATION_BASE_URL", ",", "application", ".", "pk", ")", "is_secret", "=", "False", "else", ":", "url", "=", "'%s/applications/%s/'", "%", "(", "settings", ".", "REGISTRATION_BASE_URL", ",", "application", ".", "secret_token", ")", "is_secret", "=", "True", "return", "url", ",", "is_secret" ]
Return the message to report that the value needed for serialization is missing .
def _missing_value_message ( self , parent ) : # type: (ET.Element) -> Text if self . _attribute is None : message = 'Missing required value for element "{}"' . format ( self . element_path ) else : if self . element_path == '.' : parent_name = parent . tag else : parent_name = self . element_path message = 'Missing required value for attribute "{}" on element "{}"' . format ( self . _attribute , parent_name ) return message
3,971
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1269-L1283
[ "async", "def", "end", "(", "self", ")", ":", "try", ":", "await", "self", ".", "proc", ".", "wait", "(", ")", "finally", ":", "# Cleanup temporary files.", "for", "temporary_file", "in", "self", ".", "temporary_files", ":", "temporary_file", ".", "close", "(", ")", "self", ".", "temporary_files", "=", "[", "]", "return", "self", ".", "proc", ".", "returncode" ]
Parse the primitive value within the XML element s attribute .
def _parse_attribute ( self , element , # type: ET.Element attribute , # type: Text state # type: _ProcessorState ) : # type: (...) -> Any parsed_value = self . _default attribute_value = element . get ( attribute , None ) if attribute_value is not None : parsed_value = self . _parser_func ( attribute_value , state ) elif self . required : state . raise_error ( MissingValue , 'Missing required attribute "{}" on element "{}"' . format ( self . _attribute , element . tag ) ) return parsed_value
3,972
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1285-L1305
[ "def", "Run", "(", "self", ",", "arg", ")", ":", "if", "arg", "is", "None", ":", "arg", "=", "rdf_client_action", ".", "GetClientStatsRequest", "(", ")", "proc", "=", "psutil", ".", "Process", "(", "os", ".", "getpid", "(", ")", ")", "meminfo", "=", "proc", ".", "memory_info", "(", ")", "boot_time", "=", "rdfvalue", ".", "RDFDatetime", ".", "FromSecondsSinceEpoch", "(", "psutil", ".", "boot_time", "(", ")", ")", "create_time", "=", "rdfvalue", ".", "RDFDatetime", ".", "FromSecondsSinceEpoch", "(", "proc", ".", "create_time", "(", ")", ")", "stats_collector", "=", "stats_collector_instance", ".", "Get", "(", ")", "response", "=", "rdf_client_stats", ".", "ClientStats", "(", "RSS_size", "=", "meminfo", ".", "rss", ",", "VMS_size", "=", "meminfo", ".", "vms", ",", "memory_percent", "=", "proc", ".", "memory_percent", "(", ")", ",", "bytes_received", "=", "stats_collector", ".", "GetMetricValue", "(", "\"grr_client_received_bytes\"", ")", ",", "bytes_sent", "=", "stats_collector", ".", "GetMetricValue", "(", "\"grr_client_sent_bytes\"", ")", ",", "create_time", "=", "create_time", ",", "boot_time", "=", "boot_time", ")", "response", ".", "cpu_samples", "=", "self", ".", "grr_worker", ".", "stats_collector", ".", "CpuSamplesBetween", "(", "start_time", "=", "arg", ".", "start_time", ",", "end_time", "=", "arg", ".", "end_time", ")", "response", ".", "io_samples", "=", "self", ".", "grr_worker", ".", "stats_collector", ".", "IOSamplesBetween", "(", "start_time", "=", "arg", ".", "start_time", ",", "end_time", "=", "arg", ".", "end_time", ")", "self", ".", "Send", "(", "response", ")" ]
Serialize the value to the element .
def _serialize ( self , element , # type: ET.Element value , # type: Any state # type: _ProcessorState ) : # type: (...) -> None xml_value = _hooks_apply_before_serialize ( self . _hooks , state , value ) # A value is only considered missing, and hence eligible to be replaced by its # default only if it is None. Falsey values are not considered missing and are # not replaced by the default. if xml_value is None : if self . _default is None : serialized_value = Text ( '' ) else : serialized_value = Text ( self . _default ) else : serialized_value = Text ( xml_value ) if self . _attribute : element . set ( self . _attribute , serialized_value ) else : element . text = serialized_value
3,973
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1307-L1331
[ "def", "_ResponseToClientsFullInfo", "(", "self", ",", "response", ")", ":", "c_full_info", "=", "None", "prev_cid", "=", "None", "for", "row", "in", "response", ":", "(", "cid", ",", "fs", ",", "crt", ",", "ping", ",", "clk", ",", "ip", ",", "foreman", ",", "first", ",", "last_client_ts", ",", "last_crash_ts", ",", "last_startup_ts", ",", "client_obj", ",", "client_startup_obj", ",", "last_startup_obj", ",", "label_owner", ",", "label_name", ")", "=", "row", "if", "cid", "!=", "prev_cid", ":", "if", "c_full_info", ":", "yield", "db_utils", ".", "IntToClientID", "(", "prev_cid", ")", ",", "c_full_info", "metadata", "=", "rdf_objects", ".", "ClientMetadata", "(", "certificate", "=", "crt", ",", "fleetspeak_enabled", "=", "fs", ",", "first_seen", "=", "mysql_utils", ".", "TimestampToRDFDatetime", "(", "first", ")", ",", "ping", "=", "mysql_utils", ".", "TimestampToRDFDatetime", "(", "ping", ")", ",", "clock", "=", "mysql_utils", ".", "TimestampToRDFDatetime", "(", "clk", ")", ",", "ip", "=", "mysql_utils", ".", "StringToRDFProto", "(", "rdf_client_network", ".", "NetworkAddress", ",", "ip", ")", ",", "last_foreman_time", "=", "mysql_utils", ".", "TimestampToRDFDatetime", "(", "foreman", ")", ",", "startup_info_timestamp", "=", "mysql_utils", ".", "TimestampToRDFDatetime", "(", "last_startup_ts", ")", ",", "last_crash_timestamp", "=", "mysql_utils", ".", "TimestampToRDFDatetime", "(", "last_crash_ts", ")", ")", "if", "client_obj", "is", "not", "None", ":", "l_snapshot", "=", "rdf_objects", ".", "ClientSnapshot", ".", "FromSerializedString", "(", "client_obj", ")", "l_snapshot", ".", "timestamp", "=", "mysql_utils", ".", "TimestampToRDFDatetime", "(", "last_client_ts", ")", "l_snapshot", ".", "startup_info", "=", "rdf_client", ".", "StartupInfo", ".", "FromSerializedString", "(", "client_startup_obj", ")", "l_snapshot", ".", "startup_info", ".", "timestamp", "=", "l_snapshot", ".", "timestamp", "else", ":", "l_snapshot", "=", "rdf_objects", ".", "ClientSnapshot", 
"(", "client_id", "=", "db_utils", ".", "IntToClientID", "(", "cid", ")", ")", "if", "last_startup_obj", "is", "not", "None", ":", "startup_info", "=", "rdf_client", ".", "StartupInfo", ".", "FromSerializedString", "(", "last_startup_obj", ")", "startup_info", ".", "timestamp", "=", "mysql_utils", ".", "TimestampToRDFDatetime", "(", "last_startup_ts", ")", "else", ":", "startup_info", "=", "None", "prev_cid", "=", "cid", "c_full_info", "=", "rdf_objects", ".", "ClientFullInfo", "(", "metadata", "=", "metadata", ",", "labels", "=", "[", "]", ",", "last_snapshot", "=", "l_snapshot", ",", "last_startup_info", "=", "startup_info", ")", "if", "label_owner", "and", "label_name", ":", "c_full_info", ".", "labels", ".", "append", "(", "rdf_objects", ".", "ClientLabel", "(", "name", "=", "label_name", ",", "owner", "=", "label_owner", ")", ")", "if", "c_full_info", ":", "yield", "db_utils", ".", "IntToClientID", "(", "prev_cid", ")", ",", "c_full_info" ]
Push an item onto the state s stack of locations .
def push_location ( self , element_path , # type: Text array_index = None # type: Optional[int] ) : # type: (...) -> None location = ProcessorLocation ( element_path = element_path , array_index = array_index ) self . _locations . append ( location )
3,974
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1351-L1359
[ "def", "read_scen_file", "(", "filepath", ",", "columns", "=", "{", "\"model\"", ":", "[", "\"unspecified\"", "]", ",", "\"scenario\"", ":", "[", "\"unspecified\"", "]", ",", "\"climate_model\"", ":", "[", "\"unspecified\"", "]", ",", "}", ",", "*", "*", "kwargs", ")", ":", "mdata", "=", "MAGICCData", "(", "filepath", ",", "columns", "=", "columns", ",", "*", "*", "kwargs", ")", "return", "mdata" ]
Raise an exception with the current parser state information and error message .
def raise_error ( self , exception_type , # type: Type[Exception] message # type: Text ) : # type: (...) -> NoReturn error_message = '{} at {}' . format ( message , repr ( self ) ) raise exception_type ( error_message )
3,975
https://github.com/gatkin/declxml/blob/3a2324b43aee943e82a04587fbb68932c6f392ba/declxml.py#L1361-L1369
[ "def", "_broadcast_indexes", "(", "self", ",", "key", ")", ":", "key", "=", "self", ".", "_item_key_to_tuple", "(", "key", ")", "# key is a tuple", "# key is a tuple of full size", "key", "=", "indexing", ".", "expanded_indexer", "(", "key", ",", "self", ".", "ndim", ")", "# Convert a scalar Variable to an integer", "key", "=", "tuple", "(", "k", ".", "data", ".", "item", "(", ")", "if", "isinstance", "(", "k", ",", "Variable", ")", "and", "k", ".", "ndim", "==", "0", "else", "k", "for", "k", "in", "key", ")", "# Convert a 0d-array to an integer", "key", "=", "tuple", "(", "k", ".", "item", "(", ")", "if", "isinstance", "(", "k", ",", "np", ".", "ndarray", ")", "and", "k", ".", "ndim", "==", "0", "else", "k", "for", "k", "in", "key", ")", "if", "all", "(", "isinstance", "(", "k", ",", "BASIC_INDEXING_TYPES", ")", "for", "k", "in", "key", ")", ":", "return", "self", ".", "_broadcast_indexes_basic", "(", "key", ")", "self", ".", "_validate_indexers", "(", "key", ")", "# Detect it can be mapped as an outer indexer", "# If all key is unlabeled, or", "# key can be mapped as an OuterIndexer.", "if", "all", "(", "not", "isinstance", "(", "k", ",", "Variable", ")", "for", "k", "in", "key", ")", ":", "return", "self", ".", "_broadcast_indexes_outer", "(", "key", ")", "# If all key is 1-dimensional and there are no duplicate labels,", "# key can be mapped as an OuterIndexer.", "dims", "=", "[", "]", "for", "k", ",", "d", "in", "zip", "(", "key", ",", "self", ".", "dims", ")", ":", "if", "isinstance", "(", "k", ",", "Variable", ")", ":", "if", "len", "(", "k", ".", "dims", ")", ">", "1", ":", "return", "self", ".", "_broadcast_indexes_vectorized", "(", "key", ")", "dims", ".", "append", "(", "k", ".", "dims", "[", "0", "]", ")", "elif", "not", "isinstance", "(", "k", ",", "integer_types", ")", ":", "dims", ".", "append", "(", "d", ")", "if", "len", "(", "set", "(", "dims", ")", ")", "==", "len", "(", "dims", ")", ":", "return", "self", ".", "_broadcast_indexes_outer", "(", 
"key", ")", "return", "self", ".", "_broadcast_indexes_vectorized", "(", "key", ")" ]
call eagle and export sch or brd to partlist text file
def export_partlist_to_file ( input , output , timeout = 20 , showgui = False ) : input = norm_path ( input ) output = norm_path ( output ) commands = export_command ( output = output , output_type = 'partlist' ) command_eagle ( input = input , timeout = timeout , commands = commands , showgui = showgui )
3,976
https://github.com/ponty/eagexp/blob/1dd5108c1d8112cc87d1bda64fa6c2784ccf0ff2/eagexp/partlist.py#L16-L31
[ "def", "longest_non_repeat_v1", "(", "string", ")", ":", "if", "string", "is", "None", ":", "return", "0", "dict", "=", "{", "}", "max_length", "=", "0", "j", "=", "0", "for", "i", "in", "range", "(", "len", "(", "string", ")", ")", ":", "if", "string", "[", "i", "]", "in", "dict", ":", "j", "=", "max", "(", "dict", "[", "string", "[", "i", "]", "]", ",", "j", ")", "dict", "[", "string", "[", "i", "]", "]", "=", "i", "+", "1", "max_length", "=", "max", "(", "max_length", ",", "i", "-", "j", "+", "1", ")", "return", "max_length" ]
parse partlist text delivered by eagle .
def parse_partlist ( str ) : lines = str . strip ( ) . splitlines ( ) lines = filter ( len , lines ) hind = header_index ( lines ) if hind is None : log . debug ( 'empty partlist found' ) return ( [ ] , [ ] ) header_line = lines [ hind ] header = header_line . split ( ' ' ) header = filter ( len , header ) positions = [ header_line . index ( x ) for x in header ] header = [ x . strip ( ) . split ( ) [ 0 ] . lower ( ) for x in header ] data_lines = lines [ hind + 1 : ] def parse_data_line ( line ) : y = [ ( h , line [ pos1 : pos2 ] . strip ( ) ) for h , pos1 , pos2 in zip ( header , positions , positions [ 1 : ] + [ 1000 ] ) ] return dict ( y ) data = [ parse_data_line ( x ) for x in data_lines ] return ( header , data )
3,977
https://github.com/ponty/eagexp/blob/1dd5108c1d8112cc87d1bda64fa6c2784ccf0ff2/eagexp/partlist.py#L40-L69
[ "def", "feature_union_concat", "(", "Xs", ",", "nsamples", ",", "weights", ")", ":", "if", "any", "(", "x", "is", "FIT_FAILURE", "for", "x", "in", "Xs", ")", ":", "return", "FIT_FAILURE", "Xs", "=", "[", "X", "if", "w", "is", "None", "else", "X", "*", "w", "for", "X", ",", "w", "in", "zip", "(", "Xs", ",", "weights", ")", "if", "X", "is", "not", "None", "]", "if", "not", "Xs", ":", "return", "np", ".", "zeros", "(", "(", "nsamples", ",", "0", ")", ")", "if", "any", "(", "sparse", ".", "issparse", "(", "f", ")", "for", "f", "in", "Xs", ")", ":", "return", "sparse", ".", "hstack", "(", "Xs", ")", ".", "tocsr", "(", ")", "return", "np", ".", "hstack", "(", "Xs", ")" ]
export partlist by eagle then return it
def raw_partlist ( input , timeout = 20 , showgui = False ) : output = tempfile . NamedTemporaryFile ( prefix = 'eagexp_' , suffix = '.partlist' , delete = 0 ) . name export_partlist_to_file ( input = input , output = output , timeout = timeout , showgui = showgui ) s = Path ( output ) . text ( encoding = 'latin1' ) os . remove ( output ) return s
3,978
https://github.com/ponty/eagexp/blob/1dd5108c1d8112cc87d1bda64fa6c2784ccf0ff2/eagexp/partlist.py#L72-L87
[ "def", "create_rc_file", "(", "self", ",", "packages", ")", ":", "print", "(", "\"Creating rcfile '%s'\\n\"", "%", "self", ".", "rc_filename", ")", "# TODO bug with == in config file", "if", "not", "self", ".", "config", ".", "sections", "(", ")", ":", "self", ".", "_write_default_sections", "(", ")", "sections", "=", "{", "}", "section_text", "=", "[", "]", "for", "i", ",", "section", "in", "enumerate", "(", "self", ".", "config", ".", "sections", "(", ")", ")", ":", "if", "section", "==", "'metadata'", ":", "continue", "sections", "[", "i", "]", "=", "section", "section_text", ".", "append", "(", "'%s. %s'", "%", "(", "i", ",", "section", ")", ")", "section_text", "=", "' / '", ".", "join", "(", "section_text", ")", "self", ".", "_remap_stdin", "(", ")", "package_names", "=", "set", "(", ")", "lines", "=", "packages", ".", "readlines", "(", ")", "requirements", "=", "self", ".", "_parse_requirements", "(", "lines", ")", "for", "(", "package", ",", "version", ")", "in", "requirements", ":", "package_names", ".", "add", "(", "package", ")", "section", ",", "configured_version", "=", "self", ".", "_get_option", "(", "package", ")", "# Package already exists in configuration", "if", "section", ":", "# If there is a configured version, update it. 
If not, leave it unversioned.", "if", "configured_version", ":", "if", "configured_version", "!=", "version", ":", "print", "(", "\"Updating '%s' version from '%s' to '%s'\"", "%", "(", "package", ",", "configured_version", ",", "version", ")", ")", "self", ".", "config", ".", "set", "(", "section", ",", "package", ",", "version", ")", "continue", "section", "=", "self", ".", "_get_section", "(", "package", ",", "sections", ",", "section_text", ")", "self", ".", "_set_option", "(", "section", ",", "package", ",", "version", ")", "for", "section", "in", "self", ".", "config", ".", "sections", "(", ")", ":", "if", "section", "==", "'metadata'", ":", "continue", "for", "option", "in", "self", ".", "config", ".", "options", "(", "section", ")", ":", "if", "option", "not", "in", "package_names", ":", "print", "(", "\"Removing package '%s'\"", "%", "option", ")", "self", ".", "config", ".", "remove_option", "(", "section", ",", "option", ")", "rc_file", "=", "open", "(", "self", ".", "rc_filename", ",", "'w+'", ")", "self", ".", "config", ".", "write", "(", "rc_file", ")", "rc_file", ".", "close", "(", ")" ]
export partlist by eagle then parse it
def structured_partlist ( input , timeout = 20 , showgui = False ) : s = raw_partlist ( input = input , timeout = timeout , showgui = showgui ) return parse_partlist ( s )
3,979
https://github.com/ponty/eagexp/blob/1dd5108c1d8112cc87d1bda64fa6c2784ccf0ff2/eagexp/partlist.py#L90-L100
[ "def", "getInitialSample", "(", "self", ",", "wmg", ")", ":", "cands", "=", "range", "(", "len", "(", "wmg", ")", ")", "allPairs", "=", "itertools", ".", "combinations", "(", "cands", ",", "2", ")", "V", "=", "self", ".", "createBinaryRelation", "(", "len", "(", "cands", ")", ")", "for", "pair", "in", "allPairs", ":", "if", "wmg", "[", "pair", "[", "0", "]", "+", "1", "]", "[", "pair", "[", "1", "]", "+", "1", "]", ">", "0", ":", "V", "[", "pair", "[", "0", "]", "]", "[", "pair", "[", "1", "]", "]", "=", "1", "V", "[", "pair", "[", "1", "]", "]", "[", "pair", "[", "0", "]", "]", "=", "0", "else", ":", "V", "[", "pair", "[", "0", "]", "]", "[", "pair", "[", "1", "]", "]", "=", "0", "V", "[", "pair", "[", "1", "]", "]", "[", "pair", "[", "0", "]", "]", "=", "1", "return", "V" ]
print partlist text delivered by eagle
def print_partlist ( input , timeout = 20 , showgui = False ) : print raw_partlist ( input = input , timeout = timeout , showgui = showgui )
3,980
https://github.com/ponty/eagexp/blob/1dd5108c1d8112cc87d1bda64fa6c2784ccf0ff2/eagexp/partlist.py#L104-L112
[ "def", "feature_union_concat", "(", "Xs", ",", "nsamples", ",", "weights", ")", ":", "if", "any", "(", "x", "is", "FIT_FAILURE", "for", "x", "in", "Xs", ")", ":", "return", "FIT_FAILURE", "Xs", "=", "[", "X", "if", "w", "is", "None", "else", "X", "*", "w", "for", "X", ",", "w", "in", "zip", "(", "Xs", ",", "weights", ")", "if", "X", "is", "not", "None", "]", "if", "not", "Xs", ":", "return", "np", ".", "zeros", "(", "(", "nsamples", ",", "0", ")", ")", "if", "any", "(", "sparse", ".", "issparse", "(", "f", ")", "for", "f", "in", "Xs", ")", ":", "return", "sparse", ".", "hstack", "(", "Xs", ")", ".", "tocsr", "(", ")", "return", "np", ".", "hstack", "(", "Xs", ")" ]
Graphviz source for the Hasse diagram of the domains Boolean algebra .
def bitset ( bs , member_label = None , filename = None , directory = None , format = None , render = False , view = False ) : if member_label is None : member_label = MEMBER_LABEL if filename is None : kind = 'members' if member_label else 'bits' filename = FILENAME % ( bs . __name__ , kind ) dot = graphviz . Digraph ( name = bs . __name__ , comment = repr ( bs ) , filename = filename , directory = directory , format = format , edge_attr = { 'dir' : 'none' } ) node_name = NAME_GETTERS [ 0 ] if callable ( member_label ) : node_label = member_label else : node_label = LABEL_GETTERS [ member_label ] for i in range ( bs . supremum + 1 ) : b = bs . fromint ( i ) name = node_name ( b ) dot . node ( name , node_label ( b ) ) dot . edges ( ( name , node_name ( b & ~ a ) ) for a in b . atoms ( reverse = True ) ) if render or view : dot . render ( view = view ) # pragma: no cover return dot
3,981
https://github.com/xflr6/bitsets/blob/ddcfe17e7c7a11f71f1c6764b2cecf7db05d9cdf/bitsets/visualize.py#L34-L68
[ "def", "_getNearestMappingIndexList", "(", "fromValList", ",", "toValList", ")", ":", "indexList", "=", "[", "]", "for", "fromTimestamp", "in", "fromValList", ":", "smallestDiff", "=", "_getSmallestDifference", "(", "toValList", ",", "fromTimestamp", ")", "i", "=", "toValList", ".", "index", "(", "smallestDiff", ")", "indexList", ".", "append", "(", "i", ")", "return", "indexList" ]
Returns a reference to the form element s constructor method .
def _get_field_method ( self , tp ) : method = self . field_constructor . get ( tp ) if method and hasattr ( self , method . __name__ ) : return getattr ( self , method . __name__ ) return method
3,982
https://github.com/kyzima-spb/flask-pony/blob/6cf28d70b7ebf415d58fa138fcc70b8dd57432c7/flask_pony/orm.py#L66-L71
[ "def", "write_lines_to_file", "(", "cls_name", ",", "filename", ",", "lines", ",", "metadata_dict", ")", ":", "metadata_dict", "=", "metadata_dict", "or", "{", "}", "header_line", "=", "\"%s%s\"", "%", "(", "_HEADER_PREFIX", ",", "cls_name", ")", "metadata_line", "=", "\"%s%s\"", "%", "(", "_METADATA_PREFIX", ",", "json", ".", "dumps", "(", "metadata_dict", ",", "sort_keys", "=", "True", ")", ")", "with", "tf", ".", "io", ".", "gfile", ".", "GFile", "(", "filename", ",", "\"wb\"", ")", "as", "f", ":", "for", "line", "in", "[", "header_line", ",", "metadata_line", "]", ":", "f", ".", "write", "(", "tf", ".", "compat", ".", "as_bytes", "(", "line", ")", ")", "f", ".", "write", "(", "tf", ".", "compat", ".", "as_bytes", "(", "\"\\n\"", ")", ")", "if", "lines", ":", "f", ".", "write", "(", "tf", ".", "compat", ".", "as_bytes", "(", "\"\\n\"", ".", "join", "(", "lines", ")", ")", ")", "f", ".", "write", "(", "tf", ".", "compat", ".", "as_bytes", "(", "\"\\n\"", ")", ")" ]
Creates the form element .
def _create_plain_field ( self , attr , options ) : method = self . _get_field_method ( attr . py_type ) or self . _create_other_field klass , options = method ( attr , options ) if attr . is_unique : options [ 'validators' ] . append ( validators . UniqueEntityValidator ( attr . entity ) ) return klass , options
3,983
https://github.com/kyzima-spb/flask-pony/blob/6cf28d70b7ebf415d58fa138fcc70b8dd57432c7/flask_pony/orm.py#L77-L85
[ "def", "_set_vibration_nix", "(", "self", ",", "left_motor", ",", "right_motor", ",", "duration", ")", ":", "code", "=", "self", ".", "__get_vibration_code", "(", "left_motor", ",", "right_motor", ",", "duration", ")", "secs", ",", "msecs", "=", "convert_timeval", "(", "time", ".", "time", "(", ")", ")", "outer_event", "=", "struct", ".", "pack", "(", "EVENT_FORMAT", ",", "secs", ",", "msecs", ",", "0x15", ",", "code", ",", "1", ")", "self", ".", "_write_device", ".", "write", "(", "outer_event", ")", "self", ".", "_write_device", ".", "flush", "(", ")" ]
Creates the form element for working with entity relationships .
def _create_relational_field ( self , attr , options ) : options [ 'entity_class' ] = attr . py_type options [ 'allow_empty' ] = not attr . is_required return EntityField , options
3,984
https://github.com/kyzima-spb/flask-pony/blob/6cf28d70b7ebf415d58fa138fcc70b8dd57432c7/flask_pony/orm.py#L92-L96
[ "def", "get_gcloud_pricelist", "(", ")", ":", "try", ":", "r", "=", "requests", ".", "get", "(", "'http://cloudpricingcalculator.appspot.com'", "'/static/data/pricelist.json'", ")", "content", "=", "json", ".", "loads", "(", "r", ".", "content", ")", "except", "ConnectionError", ":", "logger", ".", "warning", "(", "\"Couldn't get updated pricelist from \"", "\"http://cloudpricingcalculator.appspot.com\"", "\"/static/data/pricelist.json. Falling back to cached \"", "\"copy, but prices may be out of date.\"", ")", "with", "open", "(", "'gcloudpricelist.json'", ")", "as", "infile", ":", "content", "=", "json", ".", "load", "(", "infile", ")", "pricelist", "=", "content", "[", "'gcp_price_list'", "]", "return", "pricelist" ]
Adds an element to the form based on the entity attribute .
def add ( self , attr , field_class = None , * * options ) : # print(attr.name, attr.py_type, getattr(attr, 'set', None)) # print(dir(attr)) # print(attr, attr.is_relation, attr.is_collection) # print(attr.is_pk, attr.auto, attr.is_unique, attr.is_part_of_unique_index, attr.composite_keys) def add ( klass , options ) : if klass : self . _fields [ attr . name ] = field_class ( * * options ) if field_class else klass ( * * options ) return self kwargs = { 'label' : attr . name , 'default' : attr . default , 'validators' : [ ] , } kwargs . update ( options ) if attr . is_pk : return add ( * self . _create_pk_field ( attr , kwargs ) ) if attr . is_collection : return add ( * self . _create_collection_field ( attr , kwargs ) ) validator = wtf_validators . InputRequired ( ) if attr . is_required and not attr . is_pk else wtf_validators . Optional ( ) kwargs [ 'validators' ] . insert ( 0 , validator ) if attr . is_relation : return add ( * self . _create_relational_field ( attr , kwargs ) ) return add ( * self . _create_plain_field ( attr , kwargs ) )
3,985
https://github.com/kyzima-spb/flask-pony/blob/6cf28d70b7ebf415d58fa138fcc70b8dd57432c7/flask_pony/orm.py#L102-L133
[ "def", "update_constants", "(", "nmrstar2cfg", "=", "\"\"", ",", "nmrstar3cfg", "=", "\"\"", ",", "resonance_classes_cfg", "=", "\"\"", ",", "spectrum_descriptions_cfg", "=", "\"\"", ")", ":", "nmrstar_constants", "=", "{", "}", "resonance_classes", "=", "{", "}", "spectrum_descriptions", "=", "{", "}", "this_directory", "=", "os", ".", "path", ".", "dirname", "(", "__file__", ")", "nmrstar2_config_filepath", "=", "os", ".", "path", ".", "join", "(", "this_directory", ",", "\"conf/constants_nmrstar2.json\"", ")", "nmrstar3_config_filepath", "=", "os", ".", "path", ".", "join", "(", "this_directory", ",", "\"conf/constants_nmrstar3.json\"", ")", "resonance_classes_config_filepath", "=", "os", ".", "path", ".", "join", "(", "this_directory", ",", "\"conf/resonance_classes.json\"", ")", "spectrum_descriptions_config_filepath", "=", "os", ".", "path", ".", "join", "(", "this_directory", ",", "\"conf/spectrum_descriptions.json\"", ")", "with", "open", "(", "nmrstar2_config_filepath", ",", "\"r\"", ")", "as", "nmrstar2config", ",", "open", "(", "nmrstar3_config_filepath", ",", "\"r\"", ")", "as", "nmrstar3config", ":", "nmrstar_constants", "[", "\"2\"", "]", "=", "json", ".", "load", "(", "nmrstar2config", ")", "nmrstar_constants", "[", "\"3\"", "]", "=", "json", ".", "load", "(", "nmrstar3config", ")", "with", "open", "(", "resonance_classes_config_filepath", ",", "\"r\"", ")", "as", "config", ":", "resonance_classes", ".", "update", "(", "json", ".", "load", "(", "config", ")", ")", "with", "open", "(", "spectrum_descriptions_config_filepath", ",", "\"r\"", ")", "as", "config", ":", "spectrum_descriptions", ".", "update", "(", "json", ".", "load", "(", "config", ")", ")", "if", "nmrstar2cfg", ":", "with", "open", "(", "nmrstar2cfg", ",", "\"r\"", ")", "as", "nmrstar2config", ":", "nmrstar_constants", "[", "\"2\"", "]", ".", "update", "(", "json", ".", "load", "(", "nmrstar2config", ")", ")", "if", "nmrstar3cfg", ":", "with", "open", "(", "nmrstar2cfg", ",", "\"r\"", 
")", "as", "nmrstar3config", ":", "nmrstar_constants", "[", "\"3\"", "]", ".", "update", "(", "json", ".", "load", "(", "nmrstar3config", ")", ")", "if", "resonance_classes_cfg", ":", "with", "open", "(", "nmrstar2cfg", ",", "\"r\"", ")", "as", "config", ":", "resonance_classes", ".", "update", "(", "json", ".", "load", "(", "config", ")", ")", "if", "spectrum_descriptions_cfg", ":", "with", "open", "(", "spectrum_descriptions_cfg", ",", "\"r\"", ")", "as", "config", ":", "spectrum_descriptions", ".", "update", "(", "json", ".", "load", "(", "config", ")", ")", "NMRSTAR_CONSTANTS", ".", "update", "(", "nmrstar_constants", ")", "RESONANCE_CLASSES", ".", "update", "(", "resonance_classes", ")", "SPECTRUM_DESCRIPTIONS", ".", "update", "(", "spectrum_descriptions", ")" ]
Adds a button to the form .
def add_button ( self , name , button_class = wtf_fields . SubmitField , * * options ) : self . _buttons [ name ] = button_class ( * * options )
3,986
https://github.com/kyzima-spb/flask-pony/blob/6cf28d70b7ebf415d58fa138fcc70b8dd57432c7/flask_pony/orm.py#L135-L137
[ "def", "clear_cache", "(", "self", ")", ":", "super", "(", "HyperparameterTuningJobAnalytics", ",", "self", ")", ".", "clear_cache", "(", ")", "self", ".", "_tuning_job_describe_result", "=", "None", "self", ".", "_training_job_summaries", "=", "None" ]
Creates a form element for the UUID type .
def field_uuid ( self , attr , options ) : options [ 'validators' ] . append ( validators . UUIDValidator ( attr . entity ) ) return wtf_fields . StringField , options
3,987
https://github.com/kyzima-spb/flask-pony/blob/6cf28d70b7ebf415d58fa138fcc70b8dd57432c7/flask_pony/orm.py#L198-L201
[ "def", "open_orders", "(", "self", ",", "symbol", "=", "None", ")", ":", "api", "=", "\"order\"", "query", "=", "{", "'ordStatus.isTerminated'", ":", "False", "}", "if", "symbol", "!=", "None", ":", "query", "[", "'symbol'", "]", "=", "symbol", "orders", "=", "self", ".", "_curl_bitmex", "(", "api", "=", "api", ",", "query", "=", "{", "'filter'", ":", "json", ".", "dumps", "(", "query", ")", "}", ",", "verb", "=", "\"GET\"", ")", "return", "orders" ]
This is super minimal and pretty hacky but it counts as a first pass .
def runm ( ) : signal . signal ( signal . SIGINT , signal_handler ) count = int ( sys . argv . pop ( 1 ) ) processes = [ Process ( target = run , args = ( ) ) for x in range ( count ) ] try : for p in processes : p . start ( ) except KeyError : # Not sure why we see a keyerror here. Weird. pass finally : for p in processes : p . join ( )
3,988
https://github.com/PaulMcMillan/tasa/blob/fd548d97fd08e61c0e71296b08ffedb7d949e06a/tasa/cli.py#L79-L93
[ "def", "_get_associated_classnames", "(", "self", ",", "classname", ",", "namespace", ",", "assoc_class", ",", "result_class", ",", "result_role", ",", "role", ")", ":", "class_repo", "=", "self", ".", "_get_class_repo", "(", "namespace", ")", "result_classes", "=", "self", ".", "_classnamedict", "(", "result_class", ",", "namespace", ")", "assoc_classes", "=", "self", ".", "_classnamedict", "(", "assoc_class", ",", "namespace", ")", "rtn_classnames_set", "=", "set", "(", ")", "role", "=", "role", ".", "lower", "(", ")", "if", "role", "else", "role", "result_role", "=", "result_role", ".", "lower", "(", ")", "if", "result_role", "else", "result_role", "ref_clns", "=", "self", ".", "_get_reference_classnames", "(", "classname", ",", "namespace", ",", "assoc_class", ",", "role", ")", "cls", "=", "[", "class_repo", "[", "cln", "]", "for", "cln", "in", "ref_clns", "]", "for", "cl", "in", "cls", ":", "for", "prop", "in", "six", ".", "itervalues", "(", "cl", ".", "properties", ")", ":", "if", "prop", ".", "type", "==", "'reference'", ":", "if", "self", ".", "_assoc_prop_matches", "(", "prop", ",", "cl", ".", "classname", ",", "assoc_classes", ",", "result_classes", ",", "result_role", ")", ":", "rtn_classnames_set", ".", "add", "(", "prop", ".", "reference_class", ")", "return", "list", "(", "rtn_classnames_set", ")" ]
Establish what identity this user claims to have from request .
def identify ( self , request ) : token = self . get_jwt ( request ) if token is None : return NO_IDENTITY try : claims_set = self . decode_jwt ( token ) except ( DecodeError , ExpiredSignatureError ) : return NO_IDENTITY userid = self . get_userid ( claims_set ) if userid is None : return NO_IDENTITY extra_claims = self . get_extra_claims ( claims_set ) if extra_claims is not None : return Identity ( userid = userid , * * extra_claims ) else : return Identity ( userid = userid )
3,989
https://github.com/morepath/more.jwtauth/blob/1c3c5731612069a092e44cf612641c05edf1f083/more/jwtauth/main.py#L150-L173
[ "def", "chgroups", "(", "name", ",", "groups", ",", "append", "=", "True", ")", ":", "if", "six", ".", "PY2", ":", "name", "=", "_to_unicode", "(", "name", ")", "if", "isinstance", "(", "groups", ",", "string_types", ")", ":", "groups", "=", "groups", ".", "split", "(", "','", ")", "groups", "=", "[", "x", ".", "strip", "(", "' *'", ")", "for", "x", "in", "groups", "]", "if", "six", ".", "PY2", ":", "groups", "=", "[", "_to_unicode", "(", "x", ")", "for", "x", "in", "groups", "]", "ugrps", "=", "set", "(", "list_groups", "(", "name", ")", ")", "if", "ugrps", "==", "set", "(", "groups", ")", ":", "return", "True", "name", "=", "_cmd_quote", "(", "name", ")", "if", "not", "append", ":", "for", "group", "in", "ugrps", ":", "group", "=", "_cmd_quote", "(", "group", ")", ".", "lstrip", "(", "'\\''", ")", ".", "rstrip", "(", "'\\''", ")", "if", "group", "not", "in", "groups", ":", "cmd", "=", "'net localgroup \"{0}\" {1} /delete'", ".", "format", "(", "group", ",", "name", ")", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "python_shell", "=", "True", ")", "for", "group", "in", "groups", ":", "if", "group", "in", "ugrps", ":", "continue", "group", "=", "_cmd_quote", "(", "group", ")", ".", "lstrip", "(", "'\\''", ")", ".", "rstrip", "(", "'\\''", ")", "cmd", "=", "'net localgroup \"{0}\" {1} /add'", ".", "format", "(", "group", ",", "name", ")", "out", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "python_shell", "=", "True", ")", "if", "out", "[", "'retcode'", "]", "!=", "0", ":", "log", ".", "error", "(", "out", "[", "'stdout'", "]", ")", "return", "False", "agrps", "=", "set", "(", "list_groups", "(", "name", ")", ")", "return", "len", "(", "ugrps", "-", "agrps", ")", "==", "0" ]
Remember identity on response .
def remember ( self , response , request , identity ) : claims = identity . as_dict ( ) userid = claims . pop ( 'userid' ) claims_set = self . create_claims_set ( request , userid , claims ) token = self . encode_jwt ( claims_set ) response . headers [ 'Authorization' ] = '%s %s' % ( self . auth_header_prefix , token )
3,990
https://github.com/morepath/more.jwtauth/blob/1c3c5731612069a092e44cf612641c05edf1f083/more/jwtauth/main.py#L175-L196
[ "def", "OnAdjustVolume", "(", "self", ",", "event", ")", ":", "self", ".", "volume", "=", "self", ".", "player", ".", "audio_get_volume", "(", ")", "if", "event", ".", "GetWheelRotation", "(", ")", "<", "0", ":", "self", ".", "volume", "=", "max", "(", "0", ",", "self", ".", "volume", "-", "10", ")", "elif", "event", ".", "GetWheelRotation", "(", ")", ">", "0", ":", "self", ".", "volume", "=", "min", "(", "200", ",", "self", ".", "volume", "+", "10", ")", "self", ".", "player", ".", "audio_set_volume", "(", "self", ".", "volume", ")" ]
Decode a JWTAuth token into its claims set .
def decode_jwt ( self , token , verify_expiration = True ) : options = { 'verify_exp' : verify_expiration , } return jwt . decode ( token , self . public_key , algorithms = [ self . algorithm ] , options = options , leeway = self . leeway , issuer = self . issuer )
3,991
https://github.com/morepath/more.jwtauth/blob/1c3c5731612069a092e44cf612641c05edf1f083/more/jwtauth/main.py#L214-L239
[ "def", "start", "(", "self", ")", ":", "for", "config_class", "in", "self", ".", "watched_configurables", ":", "monitor", "=", "ConfigFileMonitor", "(", "config_class", ",", "self", ".", "config_dir", ")", "self", ".", "observers", ".", "append", "(", "monitor", ".", "start", "(", "self", ".", "add_configurable", ",", "self", ".", "update_configurable", ",", "self", ".", "remove_configurable", ")", ")", "wait_on_event", "(", "self", ".", "shutdown", ")" ]
Create the claims set based on the userid of the claimed identity the settings and the extra_claims dictionary .
def create_claims_set ( self , request , userid , extra_claims = None ) : claims_set = { self . userid_claim : userid } now = timegm ( datetime . utcnow ( ) . utctimetuple ( ) ) if self . expiration_delta is not None : claims_set [ 'exp' ] = now + self . expiration_delta if self . issuer is not None : claims_set [ 'iss' ] = self . issuer if self . allow_refresh : if self . refresh_delta is not None : claims_set [ 'refresh_until' ] = now + self . refresh_delta if self . refresh_nonce_handler is not None : claims_set [ 'nonce' ] = self . refresh_nonce_handler ( request , userid ) if extra_claims is not None : claims_set . update ( extra_claims ) return claims_set
3,992
https://github.com/morepath/more.jwtauth/blob/1c3c5731612069a092e44cf612641c05edf1f083/more/jwtauth/main.py#L241-L280
[ "def", "to_td", "(", "frame", ",", "name", ",", "con", ",", "if_exists", "=", "'fail'", ",", "time_col", "=", "None", ",", "time_index", "=", "None", ",", "index", "=", "True", ",", "index_label", "=", "None", ",", "chunksize", "=", "10000", ",", "date_format", "=", "None", ")", ":", "database", ",", "table", "=", "name", ".", "split", "(", "'.'", ")", "uploader", "=", "StreamingUploader", "(", "con", ".", "client", ",", "database", ",", "table", ",", "show_progress", "=", "True", ",", "clear_progress", "=", "True", ")", "uploader", ".", "message", "(", "'Streaming import into: {0}.{1}'", ".", "format", "(", "database", ",", "table", ")", ")", "# check existence", "if", "if_exists", "==", "'fail'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "RuntimeError", "(", "'table \"%s\" already exists'", "%", "name", ")", "elif", "if_exists", "==", "'replace'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "pass", "else", ":", "uploader", ".", "message", "(", "'deleting old table...'", ")", "con", ".", "client", ".", "delete_table", "(", "database", ",", "table", ")", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "elif", "if_exists", "==", "'append'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "ValueError", "(", "'invalid value 
for if_exists: %s'", "%", "if_exists", ")", "# \"time_index\" implies \"index=False\"", "if", "time_index", ":", "index", "=", "None", "# convert", "frame", "=", "frame", ".", "copy", "(", ")", "frame", "=", "_convert_time_column", "(", "frame", ",", "time_col", ",", "time_index", ")", "frame", "=", "_convert_index_column", "(", "frame", ",", "index", ",", "index_label", ")", "frame", "=", "_convert_date_format", "(", "frame", ",", "date_format", ")", "# upload", "uploader", ".", "upload_frame", "(", "frame", ",", "chunksize", ")", "uploader", ".", "wait_for_import", "(", "len", "(", "frame", ")", ")" ]
Encode a JWT token based on the claims_set and the settings .
def encode_jwt ( self , claims_set ) : token = jwt . encode ( claims_set , self . private_key , self . algorithm ) if PY3 : token = token . decode ( encoding = 'UTF-8' ) return token
3,993
https://github.com/morepath/more.jwtauth/blob/1c3c5731612069a092e44cf612641c05edf1f083/more/jwtauth/main.py#L282-L299
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
Get claims holding extra identity info from the claims set .
def get_extra_claims ( self , claims_set ) : reserved_claims = ( self . userid_claim , "iss" , "aud" , "exp" , "nbf" , "iat" , "jti" , "refresh_until" , "nonce" ) extra_claims = { } for claim in claims_set : if claim not in reserved_claims : extra_claims [ claim ] = claims_set [ claim ] if not extra_claims : return None return extra_claims
3,994
https://github.com/morepath/more.jwtauth/blob/1c3c5731612069a092e44cf612641c05edf1f083/more/jwtauth/main.py#L314-L333
[ "def", "get_api_v1_info", "(", "api_prefix", ")", ":", "websocket_root", "=", "base_ws_uri", "(", ")", "+", "EVENTS_ENDPOINT", "docs_url", "=", "[", "'https://docs.bigchaindb.com/projects/server/en/v'", ",", "version", ".", "__version__", ",", "'/http-client-server-api.html'", ",", "]", "return", "{", "'docs'", ":", "''", ".", "join", "(", "docs_url", ")", ",", "'transactions'", ":", "'{}transactions/'", ".", "format", "(", "api_prefix", ")", ",", "'blocks'", ":", "'{}blocks/'", ".", "format", "(", "api_prefix", ")", ",", "'assets'", ":", "'{}assets/'", ".", "format", "(", "api_prefix", ")", ",", "'outputs'", ":", "'{}outputs/'", ".", "format", "(", "api_prefix", ")", ",", "'streams'", ":", "websocket_root", ",", "'metadata'", ":", "'{}metadata/'", ".", "format", "(", "api_prefix", ")", ",", "'validators'", ":", "'{}validators'", ".", "format", "(", "api_prefix", ")", ",", "}" ]
Extract the JWT token from the authorisation header of the request .
def get_jwt ( self , request ) : try : authorization = request . authorization except ValueError : return None if authorization is None : return None authtype , token = authorization if authtype . lower ( ) != self . auth_header_prefix . lower ( ) : return None return token
3,995
https://github.com/morepath/more.jwtauth/blob/1c3c5731612069a092e44cf612641c05edf1f083/more/jwtauth/main.py#L335-L352
[ "def", "start", "(", "self", ")", ":", "for", "config_class", "in", "self", ".", "watched_configurables", ":", "monitor", "=", "ConfigFileMonitor", "(", "config_class", ",", "self", ".", "config_dir", ")", "self", ".", "observers", ".", "append", "(", "monitor", ".", "start", "(", "self", ".", "add_configurable", ",", "self", ".", "update_configurable", ",", "self", ".", "remove_configurable", ")", ")", "wait_on_event", "(", "self", ".", "shutdown", ")" ]
Verify if the request to refresh the token is valid . If valid it returns the userid which can be used to create an updated identity with remember_identity . Otherwise it raises an exception based on InvalidTokenError .
def verify_refresh ( self , request ) : if not self . allow_refresh : raise InvalidTokenError ( 'Token refresh is disabled' ) token = self . get_jwt ( request ) if token is None : raise InvalidTokenError ( 'Token not found' ) try : claims_set = self . decode_jwt ( token , self . verify_expiration_on_refresh ) # reraise the exceptions to change the error messages except DecodeError : raise DecodeError ( 'Token could not be decoded' ) except ExpiredSignatureError : raise ExpiredSignatureError ( 'Token has expired' ) userid = self . get_userid ( claims_set ) if userid is None : raise MissingRequiredClaimError ( self . userid_claim ) if self . refresh_nonce_handler is not None : if 'nonce' not in claims_set : raise MissingRequiredClaimError ( 'nonce' ) if self . refresh_nonce_handler ( request , userid ) != claims_set [ 'nonce' ] : raise InvalidTokenError ( 'Refresh nonce is not valid' ) if self . refresh_delta is not None : if 'refresh_until' not in claims_set : raise MissingRequiredClaimError ( 'refresh_until' ) now = timegm ( datetime . utcnow ( ) . utctimetuple ( ) ) refresh_until = int ( claims_set [ 'refresh_until' ] ) if refresh_until < ( now - self . leeway ) : raise ExpiredSignatureError ( 'Refresh nonce has expired' ) return userid
3,996
https://github.com/morepath/more.jwtauth/blob/1c3c5731612069a092e44cf612641c05edf1f083/more/jwtauth/main.py#L354-L404
[ "def", "distanceTo", "(", "self", ",", "tree", ")", ":", "return", "ViewClient", ".", "distance", "(", "ViewClient", ".", "__pickleable", "(", "self", ".", "views", ")", ",", "tree", ")" ]
Compute a robust linear fit using the Theil - Sen method .
def fit_theil_sen ( x , y ) : xx = numpy . asarray ( x ) y1 = numpy . asarray ( y ) n = len ( xx ) if n < 5 : raise ValueError ( 'Number of points < 5' ) if xx . ndim != 1 : raise ValueError ( 'Input arrays have unexpected dimensions' ) if y1 . ndim == 1 : if len ( y1 ) != n : raise ValueError ( 'X and Y arrays have different sizes' ) yy = y1 [ numpy . newaxis , : ] elif y1 . ndim == 2 : if n != y1 . shape [ 0 ] : raise ValueError ( 'Y-array size in the fitting direction is different to the X-array size' ) yy = y1 . T else : raise ValueError ( 'Input arrays have unexpected dimensions' ) nmed = n // 2 iextra = nmed if ( n % 2 ) == 0 else nmed + 1 deltx = xx [ iextra : ] - xx [ : nmed ] delty = yy [ : , iextra : ] - yy [ : , : nmed ] allslopes = delty / deltx slopes = numpy . median ( allslopes , axis = 1 ) allinters = yy - slopes [ : , numpy . newaxis ] * x inters = numpy . median ( allinters , axis = 1 ) coeff = numpy . array ( [ inters , slopes ] ) return numpy . squeeze ( coeff )
3,997
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/array/robustfit.py#L15-L80
[ "def", "measure", "(", "self", ",", "vid", ")", ":", "from", "ambry", ".", "orm", "import", "Column", "if", "isinstance", "(", "vid", ",", "PartitionColumn", ")", ":", "return", "vid", "elif", "isinstance", "(", "vid", ",", "Column", ")", ":", "return", "PartitionColumn", "(", "vid", ")", "else", ":", "return", "PartitionColumn", "(", "self", ".", "table", ".", "column", "(", "vid", ")", ",", "self", ")" ]
Process arguments unknown to the parser
def process_unknown_arguments ( unknowns ) : result = argparse . Namespace ( ) result . extra_control = { } # It would be interesting to use argparse internal # machinery for this for unknown in unknowns : # Check prefixes prefix = '--parameter-' if unknown . startswith ( prefix ) : # process '=' values = unknown . split ( '=' ) if len ( values ) == 2 : key = values [ 0 ] [ len ( prefix ) : ] val = values [ 1 ] if key : result . extra_control [ key ] = val return result
3,998
https://github.com/guaix-ucm/numina/blob/6c829495df8937f77c2de9383c1038ffb3e713e3/numina/user/cli.py#L152-L170
[ "def", "check_dependicies", "(", "objdump_string", ")", ":", "GLIBC_version", "=", "re", ".", "compile", "(", "r'0{16}[ \\t]+GLIBC_(\\d{1,2})[.](\\d{1,3})[.]?\\d{,3}[ \\t]+'", ")", "versions", "=", "GLIBC_version", ".", "findall", "(", "objdump_string", ")", "assert", "len", "(", "versions", ")", ">", "1", "for", "major", ",", "minor", "in", "versions", ":", "assert", "int", "(", "major", ")", "<=", "2", "assert", "int", "(", "minor", ")", "<=", "14", "GLIBCXX_version", "=", "re", ".", "compile", "(", "r'0{16}[ \\t]+GLIBCXX_(\\d{1,2})[.](\\d{1,2})[.]?(\\d{,3})[ \\t]+'", ")", "versions", "=", "GLIBCXX_version", ".", "findall", "(", "objdump_string", ")", "assert", "len", "(", "versions", ")", ">", "1", "for", "major", ",", "minor", ",", "patch", "in", "versions", ":", "assert", "int", "(", "major", ")", "==", "3", "assert", "int", "(", "minor", ")", "==", "4", "assert", "patch", "==", "''", "or", "int", "(", "patch", ")", "<=", "19", "GOMP_version", "=", "re", ".", "compile", "(", "r'0{16}[ \\t]+G?OMP_(\\d{1,2})[.](\\d{1,2})[.]?\\d{,3}[ \\t]+'", ")", "versions", "=", "GOMP_version", ".", "findall", "(", "objdump_string", ")", "assert", "len", "(", "versions", ")", ">", "1", "for", "major", ",", "minor", "in", "versions", ":", "assert", "int", "(", "major", ")", "==", "1", "assert", "int", "(", "minor", ")", "==", "0" ]
Helper function for getting a file descriptor .
def get_fd ( file_or_fd , default = None ) : fd = file_or_fd if fd is None : fd = default if hasattr ( fd , "fileno" ) : fd = fd . fileno ( ) return fd
3,999
https://github.com/rfk/playitagainsam/blob/897cc8e8ca920a4afb8597b4a345361065a3f108/playitagainsam/util.py#L65-L72
[ "def", "put_lifecycle_configuration", "(", "Bucket", ",", "Rules", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "try", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "Rules", "is", "not", "None", "and", "isinstance", "(", "Rules", ",", "six", ".", "string_types", ")", ":", "Rules", "=", "salt", ".", "utils", ".", "json", ".", "loads", "(", "Rules", ")", "conn", ".", "put_bucket_lifecycle_configuration", "(", "Bucket", "=", "Bucket", ",", "LifecycleConfiguration", "=", "{", "'Rules'", ":", "Rules", "}", ")", "return", "{", "'updated'", ":", "True", ",", "'name'", ":", "Bucket", "}", "except", "ClientError", "as", "e", ":", "return", "{", "'updated'", ":", "False", ",", "'error'", ":", "__utils__", "[", "'boto3.get_error'", "]", "(", "e", ")", "}" ]