query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Check the given Dinf angle based on D8 flow direction encoding code by ArcGIS
def check_orthogonal(angle):
    """Map a Dinf angle onto (TauDEM direction constant, ArcGIS D8 code).

    Checks the eight orthogonal/diagonal directions in order and returns
    the matching pair, or (-1, -1) when *angle* matches none of them.
    """
    # (TauDEM angle constant, ArcGIS D8 encoding) for the eight directions,
    # in the same order the original elif chain tested them.
    direction_codes = (
        (FlowModelConst.e, 1),
        (FlowModelConst.ne, 128),
        (FlowModelConst.n, 64),
        (FlowModelConst.nw, 32),
        (FlowModelConst.w, 16),
        (FlowModelConst.sw, 8),
        (FlowModelConst.s, 4),
        (FlowModelConst.se, 2),
    )
    for taudem_dir, arcgis_code in direction_codes:
        if MathClass.floatequal(angle, taudem_dir):
            return taudem_dir, arcgis_code
    return -1, -1
5,900
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/postTauDEM.py#L37-L65
[ "def", "delete_persistent_data", "(", "role", ",", "zk_node", ")", ":", "if", "role", ":", "destroy_volumes", "(", "role", ")", "unreserve_resources", "(", "role", ")", "if", "zk_node", ":", "delete_zk_node", "(", "zk_node", ")" ]
start the instrument thread
def start(self, *args, **kwargs):
    """Start the instrument thread, clearing the stop flag first."""
    self._stop = False
    super(Plant, self).start(*args, **kwargs)
5,901
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/instruments/instrument_dummy.py#L121-L127
[ "def", "fix_page_relative_url", "(", "rel_url", ")", ":", "rel_url", "=", "rel_url", ".", "lstrip", "(", "'/'", ")", "# trim all heading '/'", "endswith_slash", "=", "rel_url", ".", "endswith", "(", "'/'", ")", "rel_url", "=", "rel_url", ".", "rstrip", "(", "'/'", ")", "+", "(", "'/'", "if", "endswith_slash", "else", "''", ")", "# preserve only one trailing '/'", "if", "not", "rel_url", "or", "rel_url", "==", "'/'", ":", "return", "None", ",", "False", "file_path", "=", "os", ".", "path", ".", "join", "(", "current_app", ".", "instance_path", ",", "'pages'", ",", "rel_url", ".", "replace", "(", "'/'", ",", "os", ".", "path", ".", "sep", ")", ")", "if", "rel_url", ".", "endswith", "(", "'/'", ")", ":", "index_html_file_path", "=", "os", ".", "path", ".", "join", "(", "file_path", ",", "'index.html'", ")", "if", "os", ".", "path", ".", "isfile", "(", "index_html_file_path", ")", ":", "# index.html exists", "return", "index_html_file_path", ",", "True", "return", "rel_url", ",", "False", "elif", "os", ".", "path", ".", "isfile", "(", "file_path", ")", ":", "ext", "=", "os", ".", "path", ".", "splitext", "(", "file_path", ")", "[", "1", "]", "[", "1", ":", "]", "if", "get_standard_format_name", "(", "ext", ")", "is", "not", "None", ":", "# is source of custom page", "if", "current_app", ".", "config", "[", "'PAGE_SOURCE_ACCESSIBLE'", "]", ":", "return", "file_path", ",", "True", "else", ":", "# is other direct files", "return", "file_path", ",", "True", "elif", "os", ".", "path", ".", "isdir", "(", "file_path", ")", ":", "return", "rel_url", "+", "'/'", ",", "False", "sp", "=", "rel_url", ".", "rsplit", "(", "'/'", ",", "1", ")", "m", "=", "re", ".", "match", "(", "r'(.+)\\.html?'", ",", "sp", "[", "-", "1", "]", ")", "if", "m", ":", "sp", "[", "-", "1", "]", "=", "m", ".", "group", "(", "1", ")", "+", "'.html'", "else", ":", "sp", "[", "-", "1", "]", "+=", "'.html'", "return", "'/'", ".", "join", "(", "sp", ")", ",", "False" ]
quit the instrument thread
def quit(self, *args, **kwargs):  # real signature unknown
    """Quit the instrument thread.

    Stops the update loop, waits two update periods so the loop can wind
    down, then delegates to the parent class.
    """
    self.stop()
    self._stop = True
    # Sleep for two update periods (ms) so the loop observes the stop flag.
    self.msleep(2 * int(1e3 / self.settings['update frequency']))
    super(Plant, self).quit(*args, **kwargs)
5,902
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/instruments/instrument_dummy.py#L130-L137
[ "def", "fix_page_relative_url", "(", "rel_url", ")", ":", "rel_url", "=", "rel_url", ".", "lstrip", "(", "'/'", ")", "# trim all heading '/'", "endswith_slash", "=", "rel_url", ".", "endswith", "(", "'/'", ")", "rel_url", "=", "rel_url", ".", "rstrip", "(", "'/'", ")", "+", "(", "'/'", "if", "endswith_slash", "else", "''", ")", "# preserve only one trailing '/'", "if", "not", "rel_url", "or", "rel_url", "==", "'/'", ":", "return", "None", ",", "False", "file_path", "=", "os", ".", "path", ".", "join", "(", "current_app", ".", "instance_path", ",", "'pages'", ",", "rel_url", ".", "replace", "(", "'/'", ",", "os", ".", "path", ".", "sep", ")", ")", "if", "rel_url", ".", "endswith", "(", "'/'", ")", ":", "index_html_file_path", "=", "os", ".", "path", ".", "join", "(", "file_path", ",", "'index.html'", ")", "if", "os", ".", "path", ".", "isfile", "(", "index_html_file_path", ")", ":", "# index.html exists", "return", "index_html_file_path", ",", "True", "return", "rel_url", ",", "False", "elif", "os", ".", "path", ".", "isfile", "(", "file_path", ")", ":", "ext", "=", "os", ".", "path", ".", "splitext", "(", "file_path", ")", "[", "1", "]", "[", "1", ":", "]", "if", "get_standard_format_name", "(", "ext", ")", "is", "not", "None", ":", "# is source of custom page", "if", "current_app", ".", "config", "[", "'PAGE_SOURCE_ACCESSIBLE'", "]", ":", "return", "file_path", ",", "True", "else", ":", "# is other direct files", "return", "file_path", ",", "True", "elif", "os", ".", "path", ".", "isdir", "(", "file_path", ")", ":", "return", "rel_url", "+", "'/'", ",", "False", "sp", "=", "rel_url", ".", "rsplit", "(", "'/'", ",", "1", ")", "m", "=", "re", ".", "match", "(", "r'(.+)\\.html?'", ",", "sp", "[", "-", "1", "]", ")", "if", "m", ":", "sp", "[", "-", "1", "]", "=", "m", ".", "group", "(", "1", ")", "+", "'.html'", "else", ":", "sp", "[", "-", "1", "]", "+=", "'.html'", "return", "'/'", ".", "join", "(", "sp", ")", ",", "False" ]
Calculate PI output value for given reference input and feedback
def controler_output(self, current_value):
    """Calculate the PI output value for the given feedback value.

    Uses proportional + trapezoidal-integral action with anti-windup
    clamping of the integral term to the configured output range.
    """
    cfg = self.settings
    set_point = cfg['set_point']
    Kp = cfg['gains']['proportional']
    Ki = cfg['gains']['integral']
    output_range = cfg['output_range']
    time_step = cfg['time_step']

    error_new = set_point - current_value
    print(('PD- error:\t', error_new, Ki, Kp, time_step))

    # proportional action
    self.u_P = Kp * error_new * time_step
    print(('PD- self.u_P:\t', self.u_P, self.u_I))

    # integral action (trapezoidal rule over the last two errors)
    self.u_I += Kp * Ki * (error_new + self.error) / 2.0 * time_step
    self.error = error_new
    print(('PD- self.u_P:\t', self.u_P, self.u_I))

    # anti-windup: clamp the integral term so u_P + u_I stays in range
    if self.u_P + self.u_I > output_range['max']:
        self.u_I = output_range['max'] - self.u_P
    if self.u_P + self.u_I < output_range['min']:
        self.u_I = output_range['min'] - self.u_P

    output = self.u_P + self.u_I
    print(('PD- output:\t', output))
    return output
5,903
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/instruments/instrument_dummy.py#L226-L259
[ "def", "asynchronize", "(", "framework", ",", "sync_method", ",", "doc", "=", "None", ",", "wrap_class", "=", "None", ",", "unwrap_class", "=", "None", ")", ":", "@", "functools", ".", "wraps", "(", "sync_method", ")", "def", "method", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "unwrap_class", "is", "not", "None", ":", "# Don't call isinstance(), not checking subclasses.", "unwrapped_args", "=", "[", "obj", ".", "delegate", "if", "obj", ".", "__class__", ".", "__name__", ".", "endswith", "(", "(", "unwrap_class", ",", "'MotorClientSession'", ")", ")", "else", "obj", "for", "obj", "in", "args", "]", "unwrapped_kwargs", "=", "{", "key", ":", "(", "obj", ".", "delegate", "if", "obj", ".", "__class__", ".", "__name__", ".", "endswith", "(", "(", "unwrap_class", ",", "'MotorClientSession'", ")", ")", "else", "obj", ")", "for", "key", ",", "obj", "in", "kwargs", ".", "items", "(", ")", "}", "else", ":", "# For speed, don't call unwrap_args_session/unwrap_kwargs_session.", "unwrapped_args", "=", "[", "obj", ".", "delegate", "if", "obj", ".", "__class__", ".", "__name__", ".", "endswith", "(", "'MotorClientSession'", ")", "else", "obj", "for", "obj", "in", "args", "]", "unwrapped_kwargs", "=", "{", "key", ":", "(", "obj", ".", "delegate", "if", "obj", ".", "__class__", ".", "__name__", ".", "endswith", "(", "'MotorClientSession'", ")", "else", "obj", ")", "for", "key", ",", "obj", "in", "kwargs", ".", "items", "(", ")", "}", "loop", "=", "self", ".", "get_io_loop", "(", ")", "return", "framework", ".", "run_on_executor", "(", "loop", ",", "sync_method", ",", "self", ".", "delegate", ",", "*", "unwrapped_args", ",", "*", "*", "unwrapped_kwargs", ")", "if", "wrap_class", "is", "not", "None", ":", "method", "=", "framework", ".", "pymongo_class_wrapper", "(", "method", ",", "wrap_class", ")", "method", ".", "is_wrap_method", "=", "True", "# For Synchro.", "# This is for the benefit of motor_extensions.py, which needs this info to", "# generate 
documentation with Sphinx.", "method", ".", "is_async_method", "=", "True", "name", "=", "sync_method", ".", "__name__", "method", ".", "pymongo_method_name", "=", "name", "if", "doc", "is", "not", "None", ":", "method", ".", "__doc__", "=", "doc", "return", "method" ]
Validate options and apply defaults for options not supplied.
def get_opts(opts):
    """Validate options and apply defaults for options not supplied.

    Raises ValueError when any supplied value cannot be converted to Opt.
    """
    defaults = {
        'board': None,
        'terrain': Opt.random,
        'numbers': Opt.preset,
        'ports': Opt.preset,
        'pieces': Opt.preset,
        'players': Opt.preset,
    }
    total_opts = defaults.copy()
    if opts is None:
        opts = dict()
    try:
        for key, val in opts.copy().items():
            if key == 'board':
                # board is a string, not a regular opt, and gets special
                # handling in _read_tiles_from_string
                continue
            opts[key] = Opt(val)
        total_opts.update(opts)
    except Exception:
        raise ValueError('Invalid options={}'.format(opts))
    logging.debug('used defaults=\n{}\n on opts=\n{}\nreturned total opts=\n{}'.format(
        pprint.pformat(defaults),
        pprint.pformat(opts),
        pprint.pformat(total_opts)))
    return total_opts
5,904
https://github.com/rosshamish/catan-py/blob/120438a8f16e39c13322c5d5930e1064e1d3f4be/catan/boardbuilder.py#L40-L72
[ "def", "build_gy", "(", "self", ",", "dae", ")", ":", "if", "not", "self", ".", "n", ":", "idx", "=", "range", "(", "dae", ".", "m", ")", "dae", ".", "set_jac", "(", "Gy", ",", "1e-6", ",", "idx", ",", "idx", ")", "return", "Vn", "=", "polar", "(", "1.0", ",", "dae", ".", "y", "[", "self", ".", "a", "]", ")", "Vc", "=", "mul", "(", "dae", ".", "y", "[", "self", ".", "v", "]", ",", "Vn", ")", "Ic", "=", "self", ".", "Y", "*", "Vc", "diagVn", "=", "spdiag", "(", "Vn", ")", "diagVc", "=", "spdiag", "(", "Vc", ")", "diagIc", "=", "spdiag", "(", "Ic", ")", "dS", "=", "self", ".", "Y", "*", "diagVn", "dS", "=", "diagVc", "*", "conj", "(", "dS", ")", "dS", "+=", "conj", "(", "diagIc", ")", "*", "diagVn", "dR", "=", "diagIc", "dR", "-=", "self", ".", "Y", "*", "diagVc", "dR", "=", "diagVc", ".", "H", ".", "T", "*", "dR", "self", ".", "gy_store", "=", "sparse", "(", "[", "[", "dR", ".", "imag", "(", ")", ",", "dR", ".", "real", "(", ")", "]", ",", "[", "dS", ".", "real", "(", ")", ",", "dS", ".", "imag", "(", ")", "]", "]", ")", "return", "self", ".", "gy_store" ]
Generate a list of tiles using the given terrain and numbers options .
def _get_tiles(board=None, terrain=None, numbers=None):
    """Generate a list of tiles using the given terrain and numbers options."""
    if board is not None:
        # A board string was given: it takes precedence over the
        # terrain/numbers opts.
        return _read_tiles_from_string(board)
    # No board given: we are being asked to generate one.
    return _generate_tiles(terrain, numbers)
5,905
https://github.com/rosshamish/catan-py/blob/120438a8f16e39c13322c5d5930e1064e1d3f4be/catan/boardbuilder.py#L112-L140
[ "def", "_divide_filter_args", "(", "self", ",", "filter_args", ")", ":", "query_parms", "=", "[", "]", "# query parameter strings", "client_filter_args", "=", "{", "}", "if", "filter_args", "is", "not", "None", ":", "for", "prop_name", "in", "filter_args", ":", "prop_match", "=", "filter_args", "[", "prop_name", "]", "if", "prop_name", "in", "self", ".", "_query_props", ":", "self", ".", "_append_query_parms", "(", "query_parms", ",", "prop_name", ",", "prop_match", ")", "else", ":", "client_filter_args", "[", "prop_name", "]", "=", "prop_match", "query_parms_str", "=", "'&'", ".", "join", "(", "query_parms", ")", "if", "query_parms_str", ":", "query_parms_str", "=", "'?{}'", ".", "format", "(", "query_parms_str", ")", "return", "query_parms_str", ",", "client_filter_args" ]
Generate a list of ports using the given options .
def _get_ports(port_opts):
    """Generate a list of ports using the given options.

    Preset/debug return the standard port layout; empty/random are not
    yet implemented and return an empty list.
    """
    if port_opts in (Opt.preset, Opt.debug):
        # (tile id, direction, port type) for the standard board layout.
        preset_layout = [
            (1, 'NW', catan.board.PortType.any3),
            (2, 'W', catan.board.PortType.wood),
            (4, 'W', catan.board.PortType.brick),
            (5, 'SW', catan.board.PortType.any3),
            (6, 'SE', catan.board.PortType.any3),
            (8, 'SE', catan.board.PortType.sheep),
            (9, 'E', catan.board.PortType.any3),
            (10, 'NE', catan.board.PortType.ore),
            (12, 'NE', catan.board.PortType.wheat),
        ]
        return [catan.board.Port(tile, dir, port_type)
                for tile, dir, port_type in preset_layout]
    elif port_opts in (Opt.empty, Opt.random):
        logging.warning('{} option not yet implemented'.format(port_opts))
        return []
5,906
https://github.com/rosshamish/catan-py/blob/120438a8f16e39c13322c5d5930e1064e1d3f4be/catan/boardbuilder.py#L231-L258
[ "def", "cache_fake_input", "(", "cls", ",", "weld_input_id", ",", "fake_weld_input", ")", ":", "assert", "isinstance", "(", "weld_input_id", ",", "str", ")", "assert", "isinstance", "(", "fake_weld_input", ",", "_FakeWeldInput", ")", "Cache", ".", "_cache", "[", "weld_input_id", "]", "=", "fake_weld_input" ]
Generate a dictionary of pieces using the given options .
def _get_pieces(tiles, ports, players_opts, pieces_opts):
    """Generate a dictionary of pieces using the given options."""
    if pieces_opts == Opt.empty:
        return dict()
    elif pieces_opts == Opt.debug:
        # Fixed debug layout: settlements/roads for four players plus the
        # robber on tile 0x77.
        players = catan.game.Game.get_debug_players()
        return {
            (hexgrid.NODE, 0x23): catan.pieces.Piece(catan.pieces.PieceType.settlement, players[0]),
            (hexgrid.EDGE, 0x22): catan.pieces.Piece(catan.pieces.PieceType.road, players[0]),
            (hexgrid.NODE, 0x67): catan.pieces.Piece(catan.pieces.PieceType.settlement, players[1]),
            (hexgrid.EDGE, 0x98): catan.pieces.Piece(catan.pieces.PieceType.road, players[1]),
            (hexgrid.NODE, 0x87): catan.pieces.Piece(catan.pieces.PieceType.settlement, players[2]),
            (hexgrid.EDGE, 0x89): catan.pieces.Piece(catan.pieces.PieceType.road, players[2]),
            (hexgrid.EDGE, 0xA9): catan.pieces.Piece(catan.pieces.PieceType.road, players[3]),
            (hexgrid.TILE, 0x77): catan.pieces.Piece(catan.pieces.PieceType.robber, None),
        }
    elif pieces_opts in (Opt.preset,):
        # Place the robber on the first desert tile.
        deserts = filter(lambda tile: tile.terrain == catan.board.Terrain.desert, tiles)
        coord = hexgrid.tile_id_to_coord(list(deserts)[0].tile_id)
        return {(hexgrid.TILE, coord): catan.pieces.Piece(catan.pieces.PieceType.robber, None)}
    elif pieces_opts in (Opt.random,):
        # NOTE(review): implicitly returns None here — kept as-is.
        logging.warning('{} option not yet implemented'.format(pieces_opts))
5,907
https://github.com/rosshamish/catan-py/blob/120438a8f16e39c13322c5d5930e1064e1d3f4be/catan/boardbuilder.py#L261-L298
[ "def", "compare_branches_tags_commits", "(", "self", ",", "project_id", ",", "from_id", ",", "to_id", ")", ":", "data", "=", "{", "'from'", ":", "from_id", ",", "'to'", ":", "to_id", "}", "request", "=", "requests", ".", "get", "(", "'{0}/{1}/repository/compare'", ".", "format", "(", "self", ".", "projects_url", ",", "project_id", ")", ",", "params", "=", "data", ",", "verify", "=", "self", ".", "verify_ssl", ",", "auth", "=", "self", ".", "auth", ",", "timeout", "=", "self", ".", "timeout", ",", "headers", "=", "self", ".", "headers", ")", "if", "request", ".", "status_code", "==", "200", ":", "return", "request", ".", "json", "(", ")", "else", ":", "return", "False" ]
Create an enumerated sequence feature
def create_feature(self, **kwargs):
    """Create an enumerated sequence feature.

    Delegates to create_feature_with_http_info; when a 'callback' kwarg
    is present the call is handed off asynchronously, otherwise the
    response data is returned directly.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.create_feature_with_http_info(**kwargs)
    (data) = self.create_feature_with_http_info(**kwargs)
    return data
5,908
https://github.com/nmdp-bioinformatics/SeqAnn/blob/5ce91559b0a4fbe4fb7758e034eb258202632463/seqann/feature_client/apis/features_api.py#L53-L78
[ "def", "update_selection", "(", "self", ")", ":", "# clear all boxes", "self", ".", "clear_boxes", "(", ")", "self", ".", "draw_figure", "(", "self", ".", "s", ")", "# update temperature list", "if", "self", ".", "Data", "[", "self", ".", "s", "]", "[", "'T_or_MW'", "]", "==", "\"T\"", ":", "self", ".", "temperatures", "=", "np", ".", "array", "(", "self", ".", "Data", "[", "self", ".", "s", "]", "[", "'t_Arai'", "]", ")", "-", "273.", "else", ":", "self", ".", "temperatures", "=", "np", ".", "array", "(", "self", ".", "Data", "[", "self", ".", "s", "]", "[", "'t_Arai'", "]", ")", "self", ".", "T_list", "=", "[", "\"%.0f\"", "%", "T", "for", "T", "in", "self", ".", "temperatures", "]", "self", ".", "tmin_box", ".", "SetItems", "(", "self", ".", "T_list", ")", "self", ".", "tmax_box", ".", "SetItems", "(", "self", ".", "T_list", ")", "self", ".", "tmin_box", ".", "SetValue", "(", "\"\"", ")", "self", ".", "tmax_box", ".", "SetValue", "(", "\"\"", ")", "self", ".", "Blab_window", ".", "SetValue", "(", "\"%.0f\"", "%", "(", "float", "(", "self", ".", "Data", "[", "self", ".", "s", "]", "[", "'pars'", "]", "[", "'lab_dc_field'", "]", ")", "*", "1e6", ")", ")", "if", "\"saved\"", "in", "self", ".", "Data", "[", "self", ".", "s", "]", "[", "'pars'", "]", ":", "self", ".", "pars", "=", "self", ".", "Data", "[", "self", ".", "s", "]", "[", "'pars'", "]", "self", ".", "update_GUI_with_new_interpretation", "(", ")", "self", ".", "Add_text", "(", "self", ".", "s", ")", "self", ".", "write_sample_box", "(", ")" ]
List the enumerated sequence features at a locus
def list_features(self, locus, **kwargs):
    """List the enumerated sequence features at a locus.

    Delegates to list_features_with_http_info; asynchronous when a
    'callback' kwarg is supplied, otherwise returns the response data.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.list_features_with_http_info(locus, **kwargs)
    (data) = self.list_features_with_http_info(locus, **kwargs)
    return data
5,909
https://github.com/nmdp-bioinformatics/SeqAnn/blob/5ce91559b0a4fbe4fb7758e034eb258202632463/seqann/feature_client/apis/features_api.py#L392-L417
[ "def", "update_config", "(", "updated_project", ")", ":", "home", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "home", ",", "'.transfer'", ",", "'config.yaml'", ")", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "home", ",", "'.transfer'", ",", "'config.yaml'", ")", ",", "'r'", ")", "as", "fp", ":", "projects", "=", "yaml", ".", "load", "(", "fp", ".", "read", "(", ")", ")", "replace_index", "=", "-", "1", "for", "i", ",", "project", "in", "enumerate", "(", "projects", ")", ":", "if", "project", "[", "'name'", "]", "==", "updated_project", "[", "'name'", "]", ":", "replace_index", "=", "i", "if", "replace_index", ">", "-", "1", ":", "projects", "[", "replace_index", "]", "=", "updated_project", "store_config", "(", "projects", ")", "else", ":", "print", "(", "'Not saving configuration'", ")", "print", "(", "colored", "(", "'Project: '", "+", "updated_project", "[", "'name'", "]", "+", "' was not found in configured projects!'", ",", "'red'", ")", ")", "else", ":", "print", "(", "'Transfer is not configured.'", ")", "print", "(", "'Please run:'", ")", "print", "(", "''", ")", "print", "(", "colored", "(", "' transfer --configure'", ",", "'cyan'", ")", ")", "return" ]
List the enumerated sequence features matching a term at a locus
def list_features_0(self, locus, term, **kwargs):
    """List the enumerated sequence features matching a term at a locus.

    Delegates to list_features_0_with_http_info; asynchronous when a
    'callback' kwarg is supplied, otherwise returns the response data.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.list_features_0_with_http_info(locus, term, **kwargs)
    (data) = self.list_features_0_with_http_info(locus, term, **kwargs)
    return data
5,910
https://github.com/nmdp-bioinformatics/SeqAnn/blob/5ce91559b0a4fbe4fb7758e034eb258202632463/seqann/feature_client/apis/features_api.py#L496-L522
[ "def", "update_config", "(", "updated_project", ")", ":", "home", "=", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "home", ",", "'.transfer'", ",", "'config.yaml'", ")", ")", ":", "with", "open", "(", "os", ".", "path", ".", "join", "(", "home", ",", "'.transfer'", ",", "'config.yaml'", ")", ",", "'r'", ")", "as", "fp", ":", "projects", "=", "yaml", ".", "load", "(", "fp", ".", "read", "(", ")", ")", "replace_index", "=", "-", "1", "for", "i", ",", "project", "in", "enumerate", "(", "projects", ")", ":", "if", "project", "[", "'name'", "]", "==", "updated_project", "[", "'name'", "]", ":", "replace_index", "=", "i", "if", "replace_index", ">", "-", "1", ":", "projects", "[", "replace_index", "]", "=", "updated_project", "store_config", "(", "projects", ")", "else", ":", "print", "(", "'Not saving configuration'", ")", "print", "(", "colored", "(", "'Project: '", "+", "updated_project", "[", "'name'", "]", "+", "' was not found in configured projects!'", ",", "'red'", ")", ")", "else", ":", "print", "(", "'Transfer is not configured.'", ")", "print", "(", "'Please run:'", ")", "print", "(", "''", ")", "print", "(", "colored", "(", "' transfer --configure'", ",", "'cyan'", ")", ")", "return" ]
List the enumerated sequence features matching a term and rank at a locus
def list_features_1(self, locus, term, rank, **kwargs):
    """List the enumerated sequence features matching a term and rank at a locus.

    Delegates to list_features_1_with_http_info; asynchronous when a
    'callback' kwarg is supplied, otherwise returns the response data.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.list_features_1_with_http_info(locus, term, rank, **kwargs)
    (data) = self.list_features_1_with_http_info(locus, term, rank, **kwargs)
    return data
5,911
https://github.com/nmdp-bioinformatics/SeqAnn/blob/5ce91559b0a4fbe4fb7758e034eb258202632463/seqann/feature_client/apis/features_api.py#L607-L634
[ "def", "remove_experiment", "(", "self", ",", "id", ")", ":", "if", "id", "in", "self", ".", "experiments", ":", "self", ".", "experiments", ".", "pop", "(", "id", ")", "self", ".", "write_file", "(", ")" ]
Send a client request following HTTP redirects .
def request(self, method, uri, headers=None, bodyProducer=None):
    """Send a client request, recording Zipkin trace annotations.

    Builds a (child) Trace, stamps the X-B3-* headers, records
    http.uri/client_send before the call and
    http.responsecode/client_recv when the response arrives, then
    returns the agent's Deferred.
    """
    if self._parent_trace is None:
        trace = Trace(method)
    else:
        trace = self._parent_trace.child(method)

    if self._endpoint is not None:
        trace.set_endpoint(self._endpoint)

    if headers is None:
        headers = Headers({})

    # These headers are based on the headers used by finagle's tracing
    # http Codec.
    #
    # https://github.com/twitter/finagle/blob/master/finagle-http/
    #
    # Currently not implemented are X-B3-Sampled and X-B3-Flags
    # Tryfer's underlying Trace implementation has no notion of a Sampled
    # trace and I haven't figured out what flags are for.
    headers.setRawHeaders('X-B3-TraceId', [hex_str(trace.trace_id)])
    headers.setRawHeaders('X-B3-SpanId', [hex_str(trace.span_id)])

    if trace.parent_span_id is not None:
        headers.setRawHeaders('X-B3-ParentSpanId',
                              [hex_str(trace.parent_span_id)])

    # Similar to the headers above we use the annotation 'http.uri' for
    # because that is the standard set forth in the finagle http Codec.
    trace.record(Annotation.string('http.uri', uri))
    trace.record(Annotation.client_send())

    def _finished(resp):
        # TODO: It may be advantageous here to return a wrapped response
        # whose deliverBody can wrap it's protocol and record when the
        # application has finished reading the contents.
        trace.record(Annotation.string(
            'http.responsecode',
            '{0} {1}'.format(resp.code, resp.phrase)))
        trace.record(Annotation.client_recv())
        return resp

    d = self._agent.request(method, uri, headers, bodyProducer)
    d.addBoth(_finished)
    return d
5,912
https://github.com/tryfer/tryfer/blob/d4aa45b39eab5ce4b06d6343344afb05a0bf8582/tryfer/http.py#L42-L92
[ "def", "get_time_index_range", "(", "self", ",", "date_search_start", "=", "None", ",", "date_search_end", "=", "None", ",", "time_index_start", "=", "None", ",", "time_index_end", "=", "None", ",", "time_index", "=", "None", ")", ":", "# get the range of time based on datetime range", "time_range", "=", "None", "if", "(", "(", "self", ".", "is_time_variable_valid", "(", ")", "or", "self", ".", "_is_legacy_time_valid", "(", ")", ")", "and", "(", "date_search_start", "is", "not", "None", "or", "date_search_end", "is", "not", "None", ")", ")", ":", "log", "(", "\"Determining time range ({0} to {1})\"", "\"...\"", ".", "format", "(", "date_search_start", ",", "date_search_end", ")", ",", "\"INFO\"", ")", "time_array", "=", "self", ".", "get_time_array", "(", ")", "if", "date_search_start", "is", "not", "None", ":", "date_search_start_utc", "=", "date_search_start", "if", "self", ".", "out_tzinfo", "is", "not", "None", ":", "date_search_start_utc", "=", "self", ".", "out_tzinfo", ".", "localize", "(", "date_search_start", ")", ".", "astimezone", "(", "utc", ")", ".", "replace", "(", "tzinfo", "=", "None", ")", "seconds_start", "=", "(", "date_search_start_utc", "-", "datetime", ".", "datetime", "(", "1970", ",", "1", ",", "1", ")", ")", ".", "total_seconds", "(", ")", "time_range", "=", "np", ".", "where", "(", "time_array", ">=", "seconds_start", ")", "[", "0", "]", "if", "date_search_end", "is", "not", "None", ":", "date_search_end_utc", "=", "date_search_end", "if", "self", ".", "out_tzinfo", "is", "not", "None", ":", "date_search_end_utc", "=", "self", ".", "out_tzinfo", ".", "localize", "(", "date_search_end", ")", ".", "astimezone", "(", "utc", ")", ".", "replace", "(", "tzinfo", "=", "None", ")", "seconds_end", "=", "(", "date_search_end_utc", "-", "datetime", ".", "datetime", "(", "1970", ",", "1", ",", "1", ")", ")", ".", "total_seconds", "(", ")", "if", "time_range", "is", "not", "None", ":", "time_range", "=", "np", ".", "intersect1d", "(", "time_range", 
",", "np", ".", "where", "(", "time_array", "<=", "seconds_end", ")", "[", "0", "]", ")", "else", ":", "time_range", "=", "np", ".", "where", "(", "time_array", "<=", "seconds_end", ")", "[", "0", "]", "# get the range of time based on time index range", "elif", "time_index_start", "is", "not", "None", "or", "time_index_end", "is", "not", "None", ":", "if", "time_index_start", "is", "None", ":", "time_index_start", "=", "0", "if", "time_index_end", "is", "None", ":", "time_index_end", "=", "self", ".", "size_time", "time_range", "=", "range", "(", "time_index_start", ",", "time_index_end", ")", "# get only one time step", "elif", "time_index", "is", "not", "None", ":", "time_range", "=", "[", "time_index", "]", "# return all", "else", ":", "time_range", "=", "range", "(", "self", ".", "size_time", ")", "return", "time_range" ]
Ensure all values in the dictionary are strings, except for the value for 'candidates', which should just be an integer.
def stringify(data):
    """Coerce every value in each dict to a string, except 'candidates'.

    The 'candidates' value is forced to an int; numeric zipcodes are
    zero-padded back to five digits.
    """
    def serialize(key, value):
        if key == "candidates":
            return int(value)
        if isinstance(value, numbers.Number):
            if key == "zipcode":
                # If values are presented as integers then leading digits may
                # be cut off, and these are significant for the zipcode.
                # Add them back.
                return str(value).zfill(5)
            return str(value)
        return value

    return [{key: serialize(key, value) for key, value in row.items()}
            for row in data]
5,913
https://github.com/bennylope/smartystreets.py/blob/f45e37dd52ea7cec8ed43ce2b64724beb6dbbb69/smartystreets/client.py#L59-L82
[ "def", "toc", "(", "self", ")", ":", "elapsed", "=", "self", ".", "_time", "(", ")", "-", "self", ".", "tstart", "if", "self", ".", "verbose", ":", "self", ".", "write", "(", "'...toc(%r)=%.4fs\\n'", "%", "(", "self", ".", "label", ",", "elapsed", ")", ")", "self", ".", "flush", "(", ")", "return", "elapsed" ]
Executes the HTTP POST request
def post(self, endpoint, data):
    """Execute the HTTP POST request against the SmartyStreets API.

    Returns the decoded JSON body on HTTP 200; otherwise raises the
    error mapped to the status code (SmartyStreetsError by default).
    """
    headers = {
        "Content-Type": "application/json",
        "Accept": "application/json",
        "x-standardize-only": "true" if self.standardize else "false",
        "x-include-invalid": "true" if self.invalid else "false",
        "x-accept-keypair": "true" if self.accept_keypair else "false",
    }
    if not self.logging:
        headers["x-suppress-logging"] = "true"

    params = {"auth-id": self.auth_id, "auth-token": self.auth_token}
    url = self.BASE_URL + endpoint
    response = self.session.post(
        url,
        json.dumps(stringify(data)),
        params=params,
        headers=headers,
        timeout=self.timeout,
    )
    if response.status_code == 200:
        return response.json()
    raise ERROR_CODES.get(response.status_code, SmartyStreetsError)
5,914
https://github.com/bennylope/smartystreets.py/blob/f45e37dd52ea7cec8ed43ce2b64724beb6dbbb69/smartystreets/client.py#L128-L158
[ "def", "filename_metadata", "(", "filename", ")", ":", "from", ".", ".", "segments", "import", "Segment", "name", "=", "Path", "(", "filename", ")", ".", "name", "try", ":", "obs", ",", "desc", ",", "start", ",", "dur", "=", "name", ".", "split", "(", "'-'", ")", "except", "ValueError", "as", "exc", ":", "exc", ".", "args", "=", "(", "'Failed to parse {!r} as LIGO-T050017-compatible '", "'filename'", ".", "format", "(", "name", ")", ",", ")", "raise", "start", "=", "float", "(", "start", ")", "dur", "=", "dur", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", "while", "True", ":", "# recursively remove extension components", "try", ":", "dur", "=", "float", "(", "dur", ")", "except", "ValueError", ":", "if", "'.'", "not", "in", "dur", ":", "raise", "dur", "=", "dur", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", "else", ":", "break", "return", "obs", ",", "desc", ",", "Segment", "(", "start", ",", "start", "+", "dur", ")" ]
API method for verifying street address and geolocating
def street_addresses(self, addresses):
    """API method for verifying street addresses and geolocating.

    Accepts either a list of dicts in the API's format or a list of
    freeform address strings, which are wrapped one-per-dict.
    """
    # While it's okay in theory to accept freeform addresses they do need to
    # be submitted in a dictionary format.
    if type(addresses[0]) != dict:
        # BUG FIX: build one {"street": ...} dict per address. The previous
        # dict comprehension produced a single dict containing only the
        # last address.
        addresses = [{"street": arg} for arg in addresses]
    return AddressCollection(self.post("street-address", data=addresses))
5,915
https://github.com/bennylope/smartystreets.py/blob/f45e37dd52ea7cec8ed43ce2b64724beb6dbbb69/smartystreets/client.py#L162-L184
[ "def", "doc_modified_prompt", "(", "self", ",", ")", ":", "msgbox", "=", "QtGui", ".", "QMessageBox", "(", ")", "msgbox", ".", "setWindowTitle", "(", "\"Discard changes?\"", ")", "msgbox", ".", "setText", "(", "\"Documents have been modified.\"", ")", "msgbox", ".", "setInformativeText", "(", "\"Do you really want to exit? Changes will be lost!\"", ")", "msgbox", ".", "setStandardButtons", "(", "msgbox", ".", "Yes", "|", "msgbox", ".", "Cancel", ")", "msgbox", ".", "setDefaultButton", "(", "msgbox", ".", "Cancel", ")", "msgbox", ".", "exec_", "(", ")", "return", "msgbox", ".", "result", "(", ")" ]
Geocode one and only one address; get a single Address object back.
def street_address(self, address):
    """Geocode exactly one address; return a single Address object.

    Returns None when the API yields no match for the address.
    """
    matches = self.street_addresses([address])
    if not len(matches):
        return None
    return Address(matches[0])
5,916
https://github.com/bennylope/smartystreets.py/blob/f45e37dd52ea7cec8ed43ce2b64724beb6dbbb69/smartystreets/client.py#L186-L200
[ "def", "upload", "(", "self", ",", "params", "=", "{", "}", ")", ":", "if", "self", ".", "upload_token", "is", "not", "None", ":", "# resume upload", "status", "=", "self", ".", "check", "(", ")", "if", "status", "[", "'status'", "]", "!=", "4", ":", "return", "self", ".", "commit", "(", ")", "else", ":", "self", ".", "new_slice", "(", ")", "while", "self", ".", "slice_task_id", "!=", "0", ":", "self", ".", "upload_slice", "(", ")", "return", "self", ".", "commit", "(", ")", "else", ":", "# new upload", "self", ".", "create", "(", "self", ".", "prepare_video_params", "(", "*", "*", "params", ")", ")", "self", ".", "create_file", "(", ")", "self", ".", "new_slice", "(", ")", "while", "self", ".", "slice_task_id", "!=", "0", ":", "self", ".", "upload_slice", "(", ")", "return", "self", ".", "commit", "(", ")" ]
Scaffold a validator against a schema .
def load ( schema , uri = None , spec = None , provider = None ) : factory = Factory ( provider , spec ) return factory ( schema , uri or '#' )
5,917
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/validators/__init__.py#L21-L37
[ "def", "after_epoch", "(", "self", ",", "epoch_id", ":", "int", ",", "epoch_data", ":", "EpochData", ")", "->", "None", ":", "logging", ".", "debug", "(", "'Saving epoch %d data to \"%s\"'", ",", "epoch_id", ",", "self", ".", "_file_path", ")", "if", "not", "self", ".", "_header_written", ":", "self", ".", "_write_header", "(", "epoch_data", "=", "epoch_data", ")", "self", ".", "_write_row", "(", "epoch_id", "=", "epoch_id", ",", "epoch_data", "=", "epoch_data", ")" ]
gives qt a starting point for widget size during window resizing
def sizeHint ( self ) : w , h = self . get_width_height ( ) return QtCore . QSize ( w , h )
5,918
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/gui/windows_and_widgets/widgets.py#L416-L421
[ "def", "_read_para_cert", "(", "self", ",", "code", ",", "cbit", ",", "clen", ",", "*", ",", "desc", ",", "length", ",", "version", ")", ":", "_ctgp", "=", "self", ".", "_read_unpack", "(", "1", ")", "_ctct", "=", "self", ".", "_read_unpack", "(", "1", ")", "_ctid", "=", "self", ".", "_read_unpack", "(", "1", ")", "_cttp", "=", "self", ".", "_read_unpack", "(", "1", ")", "_ctdt", "=", "self", ".", "_read_fileng", "(", "clen", "-", "4", ")", "cert", "=", "dict", "(", "type", "=", "desc", ",", "critical", "=", "cbit", ",", "length", "=", "clen", ",", "group", "=", "_GROUP_ID", ".", "get", "(", "_ctgp", ",", "'Unassigned'", ")", ",", "count", "=", "_ctct", ",", "id", "=", "_ctid", ",", "cert_type", "=", "_CERT_TYPE", ".", "get", "(", "_cttp", ",", "'Unassigned'", ")", ",", "certificate", "=", "_ctdt", ",", ")", "_plen", "=", "length", "-", "clen", "if", "_plen", ":", "self", ".", "_read_fileng", "(", "_plen", ")", "return", "cert" ]
Prepare obj to be staged .
def stage ( obj , parent = None , member = None ) : obj = Staged ( obj , parent , member ) if isinstance ( obj , Mapping ) : for key , value in obj . items ( ) : stage ( value , obj , key ) elif isinstance ( obj , Sequence ) and not isinstance ( obj , string_types ) : for index , value in enumerate ( obj ) : stage ( value , obj , index ) elif isinstance ( obj , Set ) : for value in obj : stage ( value , obj , None ) return obj
5,919
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/pointer/stages.py#L50-L68
[ "def", "default_bitcoind_opts", "(", "config_file", "=", "None", ",", "prefix", "=", "False", ")", ":", "default_bitcoin_opts", "=", "virtualchain", ".", "get_bitcoind_config", "(", "config_file", "=", "config_file", ")", "# drop dict values that are None", "default_bitcoin_opts", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "default_bitcoin_opts", ".", "items", "(", ")", "if", "v", "is", "not", "None", "}", "# strip 'bitcoind_'", "if", "not", "prefix", ":", "default_bitcoin_opts", "=", "opt_strip", "(", "'bitcoind_'", ",", "default_bitcoin_opts", ")", "return", "default_bitcoin_opts" ]
Delete old files we ve uploaded to S3 according to grandfather father sun strategy
def rotate ( key_prefix , key_ext , bucket_name , daily_backups = 7 , weekly_backups = 4 , aws_key = None , aws_secret = None ) : session = boto3 . Session ( aws_access_key_id = aws_key , aws_secret_access_key = aws_secret ) s3 = session . resource ( 's3' ) bucket = s3 . Bucket ( bucket_name ) keys = bucket . objects . filter ( Prefix = key_prefix ) regex = '{0}-(?P<year>[\d]+?)-(?P<month>[\d]+?)-(?P<day>[\d]+?){1}' . format ( key_prefix , key_ext ) backups = [ ] for key in keys : match = re . match ( regex , str ( key . key ) ) if not match : continue year = int ( match . group ( 'year' ) ) month = int ( match . group ( 'month' ) ) day = int ( match . group ( 'day' ) ) key_date = datetime ( year , month , day ) backups [ : 0 ] = [ key_date ] backups = sorted ( backups , reverse = True ) if len ( backups ) > daily_backups + 1 and backups [ daily_backups ] - backups [ daily_backups + 1 ] < timedelta ( days = 7 ) : key = bucket . Object ( "{0}{1}{2}" . format ( key_prefix , backups [ daily_backups ] . strftime ( "-%Y-%m-%d" ) , key_ext ) ) logger . debug ( "deleting {0}" . format ( key ) ) key . delete ( ) del backups [ daily_backups ] month_offset = daily_backups + weekly_backups if len ( backups ) > month_offset + 1 and backups [ month_offset ] - backups [ month_offset + 1 ] < timedelta ( days = 30 ) : key = bucket . Object ( "{0}{1}{2}" . format ( key_prefix , backups [ month_offset ] . strftime ( "-%Y-%m-%d" ) , key_ext ) ) logger . debug ( "deleting {0}" . format ( key ) ) key . delete ( ) del backups [ month_offset ]
5,920
https://github.com/dirkcuys/s3-backup-rotate/blob/ab226c7b636550823a9c91e3ebd81776d255f204/dcu/active_memory/rotate.py#L10-L46
[ "def", "get_experiment_time", "(", "port", ")", ":", "response", "=", "rest_get", "(", "experiment_url", "(", "port", ")", ",", "REST_TIME_OUT", ")", "if", "response", "and", "check_response", "(", "response", ")", ":", "content", "=", "convert_time_stamp_to_date", "(", "json", ".", "loads", "(", "response", ".", "text", ")", ")", "return", "content", ".", "get", "(", "'startTime'", ")", ",", "content", ".", "get", "(", "'endTime'", ")", "return", "None", ",", "None" ]
Return the filename and extension according to the first dot in the filename . This helps date stamping . tar . bz2 or . ext . gz files properly .
def splitext ( filename ) : index = filename . find ( '.' ) if index == 0 : index = 1 + filename [ 1 : ] . find ( '.' ) if index == - 1 : return filename , '' return filename [ : index ] , filename [ index : ] return os . path . splitext ( filename )
5,921
https://github.com/dirkcuys/s3-backup-rotate/blob/ab226c7b636550823a9c91e3ebd81776d255f204/dcu/active_memory/rotate.py#L49-L59
[ "def", "stopReceivingBoxes", "(", "self", ",", "reason", ")", ":", "AMP", ".", "stopReceivingBoxes", "(", "self", ",", "reason", ")", "log", ".", "removeObserver", "(", "self", ".", "_emit", ")" ]
Launches the fitting routine on another thread
def start_fitting ( self ) : self . queue = queue . Queue ( ) self . peak_vals = [ ] self . fit_thread = QThread ( ) #must be assigned as an instance variable, not local, as otherwise thread is garbage #collected immediately at the end of the function before it runs self . fitobj = self . do_fit ( str ( self . data_filepath . text ( ) ) , self . matplotlibwidget , self . queue , self . peak_vals , self . peak_locs ) self . fitobj . moveToThread ( self . fit_thread ) self . fit_thread . started . connect ( self . fitobj . run ) self . fitobj . finished . connect ( self . fit_thread . quit ) # clean up. quit thread after script is finished self . fitobj . status . connect ( self . update_status ) self . fit_thread . start ( )
5,922
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/gui/manual_fitting.py#L326-L339
[ "def", "generate_http_manifest", "(", "self", ")", ":", "base_path", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "translate_path", "(", "self", ".", "path", ")", ")", "self", ".", "dataset", "=", "dtoolcore", ".", "DataSet", ".", "from_uri", "(", "base_path", ")", "admin_metadata_fpath", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "\".dtool\"", ",", "\"dtool\"", ")", "with", "open", "(", "admin_metadata_fpath", ")", "as", "fh", ":", "admin_metadata", "=", "json", ".", "load", "(", "fh", ")", "http_manifest", "=", "{", "\"admin_metadata\"", ":", "admin_metadata", ",", "\"manifest_url\"", ":", "self", ".", "generate_url", "(", "\".dtool/manifest.json\"", ")", ",", "\"readme_url\"", ":", "self", ".", "generate_url", "(", "\"README.yml\"", ")", ",", "\"overlays\"", ":", "self", ".", "generate_overlay_urls", "(", ")", ",", "\"item_urls\"", ":", "self", ".", "generate_item_urls", "(", ")", "}", "return", "bytes", "(", "json", ".", "dumps", "(", "http_manifest", ")", ",", "\"utf-8\"", ")" ]
Compute difference in conduction band min and valence band max
def _get_bandgap_from_bands ( energies , nelec ) : nelec = int ( nelec ) valence = [ x [ nelec - 1 ] for x in energies ] conduction = [ x [ nelec ] for x in energies ] return max ( min ( conduction ) - max ( valence ) , 0.0 )
5,923
https://github.com/CitrineInformatics/pif-dft/blob/d5411dc1f6c6e8d454b132977ca7ab3bb8131a80/dfttopif/parsers/vasp.py#L331-L336
[ "def", "parse_registries", "(", "filesystem", ",", "registries", ")", ":", "results", "=", "{", "}", "for", "path", "in", "registries", ":", "with", "NamedTemporaryFile", "(", "buffering", "=", "0", ")", "as", "tempfile", ":", "filesystem", ".", "download", "(", "path", ",", "tempfile", ".", "name", ")", "registry", "=", "RegistryHive", "(", "tempfile", ".", "name", ")", "registry", ".", "rootkey", "=", "registry_root", "(", "path", ")", "results", ".", "update", "(", "{", "k", ".", "path", ":", "(", "k", ".", "timestamp", ",", "k", ".", "values", ")", "for", "k", "in", "registry", ".", "keys", "(", ")", "}", ")", "return", "results" ]
Get the bandgap from the EIGENVAL file
def _get_bandgap_eigenval ( eigenval_fname , outcar_fname ) : with open ( outcar_fname , "r" ) as f : parser = OutcarParser ( ) nelec = next ( iter ( filter ( lambda x : "number of electrons" in x , parser . parse ( f . readlines ( ) ) ) ) ) [ "number of electrons" ] with open ( eigenval_fname , "r" ) as f : eigenval_info = list ( EigenvalParser ( ) . parse ( f . readlines ( ) ) ) # spin_polarized = (2 == len(next(filter(lambda x: "kpoint" in x, eigenval_info))["occupancies"][0])) # if spin_polarized: all_energies = [ zip ( * x [ "energies" ] ) for x in eigenval_info if "energies" in x ] spin_energies = zip ( * all_energies ) gaps = [ VaspParser . _get_bandgap_from_bands ( x , nelec / 2.0 ) for x in spin_energies ] return min ( gaps )
5,924
https://github.com/CitrineInformatics/pif-dft/blob/d5411dc1f6c6e8d454b132977ca7ab3bb8131a80/dfttopif/parsers/vasp.py#L339-L351
[ "def", "archive_namespace", "(", "self", ")", ":", "try", ":", "for", "ns_prefix", ",", "url", "in", "self", ".", "feed", ".", "namespaces", ".", "items", "(", ")", ":", "if", "url", "==", "'http://purl.org/syndication/history/1.0'", ":", "return", "ns_prefix", "except", "AttributeError", ":", "pass", "return", "None" ]
Get the bandgap from the DOSCAR file
def _get_bandgap_doscar ( filename ) : with open ( filename ) as fp : for i in range ( 6 ) : l = fp . readline ( ) efermi = float ( l . split ( ) [ 3 ] ) step1 = fp . readline ( ) . split ( ) [ 0 ] step2 = fp . readline ( ) . split ( ) [ 0 ] step_size = float ( step2 ) - float ( step1 ) not_found = True while not_found : l = fp . readline ( ) . split ( ) e = float ( l . pop ( 0 ) ) dens = 0.0 for i in range ( int ( len ( l ) / 2 ) ) : dens += float ( l [ i ] ) if e < efermi and dens > 1e-3 : bot = e elif e > efermi and dens > 1e-3 : top = e not_found = False if top - bot < step_size * 2 : bandgap = 0.0 else : bandgap = float ( top - bot ) return bandgap
5,925
https://github.com/CitrineInformatics/pif-dft/blob/d5411dc1f6c6e8d454b132977ca7ab3bb8131a80/dfttopif/parsers/vasp.py#L354-L380
[ "def", "extract_keypairs", "(", "lines", ",", "regexer", ")", ":", "updates", "=", "{", "}", "for", "line", "in", "lines", ":", "# for consistency we must match the replacer and strip whitespace / newlines", "match", "=", "regexer", ".", "match", "(", "line", ".", "strip", "(", ")", ")", "if", "not", "match", ":", "continue", "k_v", "=", "match", ".", "groupdict", "(", ")", "updates", "[", "k_v", "[", "Constants", ".", "KEY_GROUP", "]", "]", "=", "k_v", "[", "Constants", ".", "VALUE_GROUP", "]", "return", "updates" ]
Get the bandgap either from the EIGENVAL or DOSCAR files
def get_band_gap ( self ) : if self . outcar is not None and self . eignval is not None : bandgap = VaspParser . _get_bandgap_eigenval ( self . eignval , self . outcar ) elif self . doscar is not None : bandgap = VaspParser . _get_bandgap_doscar ( self . doscar ) else : return None return Property ( scalars = [ Scalar ( value = round ( bandgap , 3 ) ) ] , units = 'eV' )
5,926
https://github.com/CitrineInformatics/pif-dft/blob/d5411dc1f6c6e8d454b132977ca7ab3bb8131a80/dfttopif/parsers/vasp.py#L382-L390
[ "def", "WriteClientSnapshot", "(", "self", ",", "snapshot", ")", ":", "client_id", "=", "snapshot", ".", "client_id", "if", "client_id", "not", "in", "self", ".", "metadatas", ":", "raise", "db", ".", "UnknownClientError", "(", "client_id", ")", "startup_info", "=", "snapshot", ".", "startup_info", "snapshot", ".", "startup_info", "=", "None", "ts", "=", "rdfvalue", ".", "RDFDatetime", ".", "Now", "(", ")", "history", "=", "self", ".", "clients", ".", "setdefault", "(", "client_id", ",", "{", "}", ")", "history", "[", "ts", "]", "=", "snapshot", ".", "SerializeToString", "(", ")", "history", "=", "self", ".", "startup_history", ".", "setdefault", "(", "client_id", ",", "{", "}", ")", "history", "[", "ts", "]", "=", "startup_info", ".", "SerializeToString", "(", ")", "snapshot", ".", "startup_info", "=", "startup_info" ]
Get raster value by xy coordinates .
def get_value_by_xy ( self , x , y ) : if x < self . xMin or x > self . xMax or y < self . yMin or y > self . yMax : return None # raise ValueError("The x or y value must be within the Min and Max!") else : row = self . nRows - int ( numpy . ceil ( ( y - self . yMin ) / self . dx ) ) col = int ( numpy . floor ( ( x - self . xMin ) / self . dx ) ) value = self . data [ row ] [ col ] if value == self . noDataValue : return None else : return value
5,927
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L196-L216
[ "def", "_init_libcrypto", "(", ")", ":", "libcrypto", "=", "_load_libcrypto", "(", ")", "try", ":", "libcrypto", ".", "OPENSSL_init_crypto", "(", ")", "except", "AttributeError", ":", "# Support for OpenSSL < 1.1 (OPENSSL_API_COMPAT < 0x10100000L)", "libcrypto", ".", "OPENSSL_no_config", "(", ")", "libcrypto", ".", "OPENSSL_add_all_algorithms_noconf", "(", ")", "libcrypto", ".", "RSA_new", ".", "argtypes", "=", "(", ")", "libcrypto", ".", "RSA_new", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "RSA_size", ".", "argtype", "=", "(", "c_void_p", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "argtypes", "=", "(", "c_char_p", ",", "c_int", ")", "libcrypto", ".", "BIO_new_mem_buf", ".", "restype", "=", "c_void_p", "libcrypto", ".", "BIO_free", ".", "argtypes", "=", "(", "c_void_p", ",", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSAPrivateKey", ".", "restype", "=", "c_void_p", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "argtypes", "=", "(", "c_void_p", ",", "c_void_p", ",", "c_void_p", ",", "c_void_p", ")", "libcrypto", ".", "PEM_read_bio_RSA_PUBKEY", ".", "restype", "=", "c_void_p", "libcrypto", ".", "RSA_private_encrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "libcrypto", ".", "RSA_public_decrypt", ".", "argtypes", "=", "(", "c_int", ",", "c_char_p", ",", "c_char_p", ",", "c_void_p", ",", "c_int", ")", "return", "libcrypto" ]
Get the coordinates of central grid .
def get_central_coors ( self , row , col ) : if row < 0 or row >= self . nRows or col < 0 or col >= self . nCols : raise ValueError ( "The row (%d) or col (%d) must be >=0 and less than " "nRows (%d) or nCols (%d)!" % ( row , col , self . nRows , self . nCols ) ) else : tmpx = self . xMin + ( col + 0.5 ) * self . dx tmpy = self . yMax - ( row + 0.5 ) * self . dx return tmpx , tmpy
5,928
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L218-L234
[ "def", "base_mortality_rate", "(", "self", ",", "index", ":", "pd", ".", "Index", ")", "->", "pd", ".", "Series", ":", "return", "pd", ".", "Series", "(", "self", ".", "config", ".", "mortality_rate", ",", "index", "=", "index", ")" ]
Read raster by GDAL .
def read_raster ( raster_file ) : ds = gdal_Open ( raster_file ) band = ds . GetRasterBand ( 1 ) data = band . ReadAsArray ( ) xsize = band . XSize ysize = band . YSize nodata_value = band . GetNoDataValue ( ) geotrans = ds . GetGeoTransform ( ) dttype = band . DataType srs = osr_SpatialReference ( ) srs . ImportFromWkt ( ds . GetProjection ( ) ) # print(srs.ExportToProj4()) if nodata_value is None : nodata_value = DEFAULT_NODATA band = None ds = None return Raster ( ysize , xsize , data , nodata_value , geotrans , srs , dttype )
5,929
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L249-L275
[ "def", "cli", "(", "sock", ",", "configs", ",", "modules", ",", "files", ",", "log", ",", "debug", ")", ":", "setup_logging", "(", "log", ",", "debug", ")", "config", "=", "join_configs", "(", "configs", ")", "# load python modules", "load_modules", "(", "modules", ")", "# load python files", "load_files", "(", "files", ")", "# summarize active events and callbacks", "summarize_events", "(", ")", "gloop", "=", "gevent", ".", "Greenlet", ".", "spawn", "(", "loop", ",", "sock", "=", "sock", ",", "config", "=", "config", ")", "gloop", ".", "start", "(", ")", "gloop", ".", "join", "(", ")" ]
Generate mask data from a given raster data .
def get_mask_from_raster ( rasterfile , outmaskfile , keep_nodata = False ) : raster_r = RasterUtilClass . read_raster ( rasterfile ) xsize = raster_r . nCols ysize = raster_r . nRows nodata_value = raster_r . noDataValue srs = raster_r . srs x_min = raster_r . xMin y_max = raster_r . yMax dx = raster_r . dx data = raster_r . data if not keep_nodata : i_min = ysize - 1 i_max = 0 j_min = xsize - 1 j_max = 0 for i in range ( ysize ) : for j in range ( xsize ) : if abs ( data [ i ] [ j ] - nodata_value ) > DELTA : i_min = min ( i , i_min ) i_max = max ( i , i_max ) j_min = min ( j , j_min ) j_max = max ( j , j_max ) # print(i_min, i_max, j_min, j_max) y_size_mask = i_max - i_min + 1 x_size_mask = j_max - j_min + 1 x_min_mask = x_min + j_min * dx y_max_mask = y_max - i_min * dx else : y_size_mask = ysize x_size_mask = xsize x_min_mask = x_min y_max_mask = y_max i_min = 0 j_min = 0 print ( '%dx%d -> %dx%d' % ( xsize , ysize , x_size_mask , y_size_mask ) ) mask = numpy . zeros ( ( y_size_mask , x_size_mask ) ) for i in range ( y_size_mask ) : for j in range ( x_size_mask ) : if abs ( data [ i + i_min ] [ j + j_min ] - nodata_value ) > DELTA : mask [ i ] [ j ] = 1 else : mask [ i ] [ j ] = DEFAULT_NODATA mask_geotrans = [ x_min_mask , dx , 0 , y_max_mask , 0 , - dx ] RasterUtilClass . write_gtiff_file ( outmaskfile , y_size_mask , x_size_mask , mask , mask_geotrans , srs , DEFAULT_NODATA , GDT_Int32 ) return Raster ( y_size_mask , x_size_mask , mask , DEFAULT_NODATA , mask_geotrans , srs )
5,930
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L278-L337
[ "def", "cli", "(", "sock", ",", "configs", ",", "modules", ",", "files", ",", "log", ",", "debug", ")", ":", "setup_logging", "(", "log", ",", "debug", ")", "config", "=", "join_configs", "(", "configs", ")", "# load python modules", "load_modules", "(", "modules", ")", "# load python files", "load_files", "(", "files", ")", "# summarize active events and callbacks", "summarize_events", "(", ")", "gloop", "=", "gevent", ".", "Greenlet", ".", "spawn", "(", "loop", ",", "sock", "=", "sock", ",", "config", "=", "config", ")", "gloop", ".", "start", "(", ")", "gloop", ".", "join", "(", ")" ]
Reclassify raster by given classifier dict .
def raster_reclassify ( srcfile , v_dict , dstfile , gdaltype = GDT_Float32 ) : src_r = RasterUtilClass . read_raster ( srcfile ) src_data = src_r . data dst_data = numpy . copy ( src_data ) if gdaltype == GDT_Float32 and src_r . dataType != GDT_Float32 : gdaltype = src_r . dataType no_data = src_r . noDataValue new_no_data = DEFAULT_NODATA if gdaltype in [ GDT_Unknown , GDT_Byte , GDT_UInt16 , GDT_UInt32 ] : new_no_data = 0 if not MathClass . floatequal ( new_no_data , src_r . noDataValue ) : if src_r . noDataValue not in v_dict : v_dict [ src_r . noDataValue ] = new_no_data no_data = new_no_data for ( k , v ) in iteritems ( v_dict ) : dst_data [ src_data == k ] = v RasterUtilClass . write_gtiff_file ( dstfile , src_r . nRows , src_r . nCols , dst_data , src_r . geotrans , src_r . srs , no_data , gdaltype )
5,931
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L340-L366
[ "def", "template_sphere", "(", "radius", ",", "dimensions", ")", ":", "if", "int", "(", "dimensions", ")", "!=", "dimensions", ":", "raise", "TypeError", "(", "'The supplied dimension parameter must be of type integer.'", ")", "dimensions", "=", "int", "(", "dimensions", ")", "return", "template_ellipsoid", "(", "dimensions", "*", "[", "radius", "*", "2", "]", ")" ]
Output Raster to GeoTiff format file .
def write_gtiff_file ( f_name , n_rows , n_cols , data , geotransform , srs , nodata_value , gdal_type = GDT_Float32 ) : UtilClass . mkdir ( os . path . dirname ( FileClass . get_file_fullpath ( f_name ) ) ) driver = gdal_GetDriverByName ( str ( 'GTiff' ) ) try : ds = driver . Create ( f_name , n_cols , n_rows , 1 , gdal_type ) except Exception : print ( 'Cannot create output file %s' % f_name ) return ds . SetGeoTransform ( geotransform ) try : ds . SetProjection ( srs . ExportToWkt ( ) ) except AttributeError or Exception : ds . SetProjection ( srs ) ds . GetRasterBand ( 1 ) . SetNoDataValue ( nodata_value ) # if data contains numpy.nan, then replaced by nodata_value if isinstance ( data , numpy . ndarray ) and data . dtype in [ numpy . dtype ( 'int' ) , numpy . dtype ( 'float' ) ] : data = numpy . where ( numpy . isnan ( data ) , nodata_value , data ) ds . GetRasterBand ( 1 ) . WriteArray ( data ) ds = None
5,932
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L369-L402
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Output Raster to ASCII file .
def write_asc_file ( filename , data , xsize , ysize , geotransform , nodata_value ) : UtilClass . mkdir ( os . path . dirname ( FileClass . get_file_fullpath ( filename ) ) ) header = 'NCOLS %d\n' 'NROWS %d\n' 'XLLCENTER %f\n' 'YLLCENTER %f\n' 'CELLSIZE %f\n' 'NODATA_VALUE %f' % ( xsize , ysize , geotransform [ 0 ] + 0.5 * geotransform [ 1 ] , geotransform [ 3 ] - ( ysize - 0.5 ) * geotransform [ 1 ] , geotransform [ 1 ] , nodata_value ) with open ( filename , 'w' , encoding = 'utf-8' ) as f : f . write ( header ) for i in range ( 0 , ysize ) : for j in range ( 0 , xsize ) : f . write ( '%s\t' % repr ( data [ i ] [ j ] ) ) f . write ( '\n' ) f . close ( )
5,933
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L405-L432
[ "def", "subtract", "(", "self", ",", "jobShape", ")", ":", "self", ".", "shape", "=", "Shape", "(", "self", ".", "shape", ".", "wallTime", ",", "self", ".", "shape", ".", "memory", "-", "jobShape", ".", "memory", ",", "self", ".", "shape", ".", "cores", "-", "jobShape", ".", "cores", ",", "self", ".", "shape", ".", "disk", "-", "jobShape", ".", "disk", ",", "self", ".", "shape", ".", "preemptable", ")" ]
Converting Raster format to GeoTIFF .
def raster_to_gtiff ( tif , geotif , change_nodata = False , change_gdal_type = False ) : rst_file = RasterUtilClass . read_raster ( tif ) nodata = rst_file . noDataValue if change_nodata : if not MathClass . floatequal ( rst_file . noDataValue , DEFAULT_NODATA ) : nodata = DEFAULT_NODATA rst_file . data [ rst_file . data == rst_file . noDataValue ] = DEFAULT_NODATA gdal_type = rst_file . dataType if change_gdal_type : gdal_type = GDT_Float32 RasterUtilClass . write_gtiff_file ( geotif , rst_file . nRows , rst_file . nCols , rst_file . data , rst_file . geotrans , rst_file . srs , nodata , gdal_type )
5,934
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L435-L456
[ "def", "list_listeners", "(", "self", ",", "retrieve_all", "=", "True", ",", "*", "*", "_params", ")", ":", "return", "self", ".", "list", "(", "'listeners'", ",", "self", ".", "lbaas_listeners_path", ",", "retrieve_all", ",", "*", "*", "_params", ")" ]
Converting Raster format to ASCII raster .
def raster_to_asc ( raster_f , asc_f ) : raster_r = RasterUtilClass . read_raster ( raster_f ) RasterUtilClass . write_asc_file ( asc_f , raster_r . data , raster_r . nCols , raster_r . nRows , raster_r . geotrans , raster_r . noDataValue )
5,935
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L459-L468
[ "def", "_remove_player", "(", "self", ",", "player_id", ")", ":", "player", "=", "self", ".", "_mpris_players", ".", "get", "(", "player_id", ")", "if", "player", ":", "if", "player", ".", "get", "(", "\"subscription\"", ")", ":", "player", "[", "\"subscription\"", "]", ".", "disconnect", "(", ")", "del", "self", ".", "_mpris_players", "[", "player_id", "]" ]
Get basic statistics of raster data .
def raster_statistics ( raster_file ) : ds = gdal_Open ( raster_file ) band = ds . GetRasterBand ( 1 ) minv , maxv , meanv , std = band . ComputeStatistics ( False ) return minv , maxv , meanv , std
5,936
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L471-L483
[ "def", "delete_subscription", "(", "self", ",", "subscription_id", ",", "client_id", ",", "client_secret", ")", ":", "self", ".", "protocol", ".", "delete", "(", "'/push_subscriptions/{id}'", ",", "id", "=", "subscription_id", ",", "client_id", "=", "client_id", ",", "client_secret", "=", "client_secret", ",", "use_webhook_server", "=", "True", ")" ]
Split raster by given shapefile and field name .
def split_raster ( rs , split_shp , field_name , temp_dir ) : UtilClass . rmmkdir ( temp_dir ) ds = ogr_Open ( split_shp ) lyr = ds . GetLayer ( 0 ) lyr . ResetReading ( ) ft = lyr . GetNextFeature ( ) while ft : cur_field_name = ft . GetFieldAsString ( field_name ) for r in rs : cur_file_name = r . split ( os . sep ) [ - 1 ] outraster = temp_dir + os . sep + cur_file_name . replace ( '.tif' , '_%s.tif' % cur_field_name . replace ( ' ' , '_' ) ) subprocess . call ( [ 'gdalwarp' , r , outraster , '-cutline' , split_shp , '-crop_to_cutline' , '-cwhere' , "'%s'='%s'" % ( field_name , cur_field_name ) , '-dstnodata' , '-9999' ] ) ft = lyr . GetNextFeature ( ) ds = None
5,937
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L486-L512
[ "def", "save_comment", "(", "self", ",", "comment_form", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Implemented from kitosid template for -", "# osid.resource.ResourceAdminSession.update_resource", "if", "comment_form", ".", "is_for_update", "(", ")", ":", "return", "self", ".", "update_comment", "(", "comment_form", ",", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "return", "self", ".", "create_comment", "(", "comment_form", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Get negative DEM data .
def get_negative_dem ( raw_dem , neg_dem ) : origin = RasterUtilClass . read_raster ( raw_dem ) max_v = numpy . max ( origin . data ) temp = origin . data < 0 neg = numpy . where ( temp , origin . noDataValue , max_v - origin . data ) RasterUtilClass . write_gtiff_file ( neg_dem , origin . nRows , origin . nCols , neg , origin . geotrans , origin . srs , origin . noDataValue , origin . dataType )
5,938
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L515-L522
[ "def", "create_index", "(", "self", ",", "collection", ",", "index_name", ",", "*", "*", "kwargs", ")", ":", "try", ":", "self", ".", "connection", "[", "collection", "]", ".", "create_index", "(", "index_name", ",", "*", "*", "kwargs", ")", "except", "Exception", "as", "exc", ":", "LOG", ".", "warn", "(", "\"Error tuning mongodb database: %s\"", ",", "exc", ")" ]
Make the raster into binarization .
def raster_binarization ( given_value , rasterfilename ) : origin_raster = RasterUtilClass . read_raster ( rasterfilename ) binary_raster = numpy . where ( origin_raster . data == given_value , 1 , 0 ) return binary_raster
5,939
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L567-L583
[ "def", "get_owned_subscriptions", "(", "self", ",", "server_id", ")", ":", "# Validate server_id", "self", ".", "_get_server", "(", "server_id", ")", "return", "list", "(", "self", ".", "_owned_subscriptions", "[", "server_id", "]", ")" ]
Erode the raster image .
def raster_erosion ( rasterfile ) : if is_string ( rasterfile ) : origin_raster = RasterUtilClass . read_raster ( str ( rasterfile ) ) elif isinstance ( rasterfile , Raster ) : origin_raster = rasterfile . data elif isinstance ( rasterfile , numpy . ndarray ) : origin_raster = rasterfile else : return "Your rasterfile has a wrong type. Type must be string or " "numpy.array or class Raster in pygeoc." max_value_raster = origin_raster . max ( ) erosion_raster = numpy . zeros ( ( origin_raster . shape [ 0 ] , origin_raster . shape [ 1 ] ) ) # In order to compute the raster edges, we need to expand the original # raster's rows and cols. We need to add the edges whose pixels' value is # the max pixel's value in raster. add_row = numpy . full ( ( 1 , origin_raster . shape [ 1 ] ) , max_value_raster ) temp_origin_raster = numpy . vstack ( ( numpy . vstack ( ( add_row , origin_raster ) ) , add_row ) ) add_col = numpy . full ( ( origin_raster . shape [ 0 ] + 2 , 1 ) , max_value_raster ) expand_origin_raster = numpy . hstack ( ( numpy . hstack ( ( add_col , temp_origin_raster ) ) , add_col ) ) # Erode the raster. for i in range ( origin_raster . shape [ 0 ] ) : for j in range ( origin_raster . shape [ 1 ] ) : min_pixel_value = max_value_raster # Find the min pixel value in the 8-neighborhood. for k in range ( 3 ) : for l in range ( 3 ) : if expand_origin_raster [ i + k , j + l ] <= min_pixel_value : min_pixel_value = expand_origin_raster [ i + k , j + l ] # After this loop, we get the min pixel's value of the # 8-neighborhood. Then we change the compute pixel's value into # the min pixel's value. erosion_raster [ i , j ] = min_pixel_value # Return the result. return erosion_raster
5,940
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L586-L631
[ "def", "create_stream_subscription", "(", "self", ",", "stream", ",", "on_data", ",", "timeout", "=", "60", ")", ":", "options", "=", "rest_pb2", ".", "StreamSubscribeRequest", "(", ")", "options", ".", "stream", "=", "stream", "manager", "=", "WebSocketSubscriptionManager", "(", "self", ".", "_client", ",", "resource", "=", "'stream'", ",", "options", "=", "options", ")", "# Represent subscription as a future", "subscription", "=", "WebSocketSubscriptionFuture", "(", "manager", ")", "wrapped_callback", "=", "functools", ".", "partial", "(", "_wrap_callback_parse_stream_data", ",", "subscription", ",", "on_data", ")", "manager", ".", "open", "(", "wrapped_callback", ",", "instance", "=", "self", ".", "_instance", ")", "# Wait until a reply or exception is received", "subscription", ".", "reply", "(", "timeout", "=", "timeout", ")", "return", "subscription" ]
Dilate the raster image .
def raster_dilation ( rasterfile ) : if is_string ( rasterfile ) : origin_raster = RasterUtilClass . read_raster ( str ( rasterfile ) ) elif isinstance ( rasterfile , Raster ) : origin_raster = rasterfile . data elif isinstance ( rasterfile , numpy . ndarray ) : origin_raster = rasterfile else : return 'Your rasterfile has a wrong type. Type must be string or ' 'numpy.array or class Raster in pygeoc.' min_value_raster = origin_raster . min ( ) dilation_raster = numpy . zeros ( ( origin_raster . shape [ 0 ] , origin_raster . shape [ 1 ] ) ) # In order to compute the raster edges, we need to expand the original # raster's rows and cols. We need to add the edges whose pixels' value is # the max pixel's value in raster. add_row = numpy . full ( ( 1 , origin_raster . shape [ 1 ] ) , min_value_raster ) temp_origin_raster = numpy . vstack ( ( numpy . vstack ( ( add_row , origin_raster ) ) , add_row ) ) add_col = numpy . full ( ( origin_raster . shape [ 0 ] + 2 , 1 ) , min_value_raster ) expand_origin_raster = numpy . hstack ( ( numpy . hstack ( ( add_col , temp_origin_raster ) ) , add_col ) ) # Dilate the raster. for i in range ( origin_raster . shape [ 0 ] ) : for j in range ( origin_raster . shape [ 1 ] ) : max_pixel_value = min_value_raster # Find the max pixel value in the 8-neighborhood. for k in range ( 3 ) : for l in range ( 3 ) : if expand_origin_raster [ i + k , j + l ] >= max_pixel_value : max_pixel_value = expand_origin_raster [ i + k , j + l ] # After this loop, we get the max pixel's value of the # 8-neighborhood. Then we change the compute pixel's value into # the max pixel's value. dilation_raster [ i , j ] = max_pixel_value # Return the result. return dilation_raster
5,941
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L634-L679
[ "def", "list_listeners", "(", "self", ",", "retrieve_all", "=", "True", ",", "*", "*", "_params", ")", ":", "return", "self", ".", "list", "(", "'listeners'", ",", "self", ".", "lbaas_listeners_path", ",", "retrieve_all", ",", "*", "*", "_params", ")" ]
Do openning .
def openning ( input_rasterfilename , times ) : input_raster = RasterUtilClass . read_raster ( input_rasterfilename ) openning_raster = input_raster for i in range ( times ) : openning_raster = RasterUtilClass . raster_erosion ( openning_raster ) for i in range ( times ) : openning_raster = RasterUtilClass . raster_dilation ( openning_raster ) return openning_raster
5,942
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L682-L700
[ "def", "logout", "(", "request", ",", "template_name", "=", "None", ",", "next_page", "=", "None", ",", "redirect_field_name", "=", "REDIRECT_FIELD_NAME", ",", "current_app", "=", "None", ",", "extra_context", "=", "None", ")", ":", "auth_logout", "(", "request", ")", "if", "next_page", "is", "not", "None", ":", "next_page", "=", "resolve_url", "(", "next_page", ")", "if", "(", "redirect_field_name", "in", "request", ".", "POST", "or", "redirect_field_name", "in", "request", ".", "GET", ")", ":", "next_page", "=", "request", ".", "POST", ".", "get", "(", "redirect_field_name", ",", "request", ".", "GET", ".", "get", "(", "redirect_field_name", ")", ")", "# Security check -- don't allow redirection to a different host.", "if", "not", "is_safe_url", "(", "url", "=", "next_page", ",", "host", "=", "request", ".", "get_host", "(", ")", ")", ":", "next_page", "=", "request", ".", "path", "if", "next_page", ":", "# Redirect to this page until the session has been cleared.", "return", "HttpResponseRedirect", "(", "next_page", ")", "current_site", "=", "get_current_site", "(", "request", ")", "context", "=", "{", "'site'", ":", "current_site", ",", "'site_name'", ":", "current_site", ".", "name", ",", "'title'", ":", "_", "(", "'Logged out'", ")", "}", "if", "extra_context", "is", "not", "None", ":", "context", ".", "update", "(", "extra_context", ")", "if", "current_app", "is", "not", "None", ":", "request", ".", "current_app", "=", "current_app", "return", "TemplateResponse", "(", "request", ",", "template_name", ",", "context", ")" ]
Do closing .
def closing ( input_rasterfilename , times ) : input_raster = RasterUtilClass . read_raster ( input_rasterfilename ) closing_raster = input_raster for i in range ( times ) : closing_raster = RasterUtilClass . raster_dilation ( closing_raster ) for i in range ( times ) : closing_raster = RasterUtilClass . raster_erosion ( closing_raster ) return closing_raster
5,943
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/raster.py#L703-L721
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
return tx fee from tx size in bytes
def calculate_tx_fee ( tx_size : int ) -> Decimal : per_kb_cost = 0.01 min_fee = Decimal ( 0.001 ) fee = Decimal ( ( tx_size / 1000 ) * per_kb_cost ) if fee <= min_fee : return min_fee else : return fee
5,944
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/transactions.py#L214-L225
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", 
"False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
p2sh embedding p2pkh
def p2sh_p2pkh_script ( network : str , address : str ) -> P2shScript : network_params = net_query ( network ) addr = Address . from_string ( network = network_params , string = address ) p2pkh = P2pkhScript ( addr ) return P2shScript ( p2pkh )
5,945
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/transactions.py#L246-L256
[ "def", "LogHttpAdminUIAccess", "(", "self", ",", "request", ",", "response", ")", ":", "# TODO(user): generate event_id elsewhere and use it for all the log", "# messages that have to do with handling corresponding request.", "event_id", "=", "self", ".", "GetNewEventId", "(", ")", "api_method", "=", "response", ".", "headers", ".", "get", "(", "\"X-API-Method\"", ",", "\"unknown\"", ")", "api_reason", "=", "response", ".", "headers", ".", "get", "(", "\"X-GRR-Reason\"", ",", "\"none\"", ")", "log_msg", "=", "\"%s API call [%s] by %s (reason: %s): %s [%d]\"", "%", "(", "event_id", ",", "api_method", ",", "request", ".", "user", ",", "api_reason", ",", "request", ".", "full_path", ",", "response", ".", "status_code", ")", "logging", ".", "info", "(", "log_msg", ")", "if", "response", ".", "headers", ".", "get", "(", "\"X-No-Log\"", ")", "!=", "\"True\"", ":", "if", "data_store", ".", "RelationalDBEnabled", "(", ")", ":", "entry", "=", "rdf_objects", ".", "APIAuditEntry", ".", "FromHttpRequestResponse", "(", "request", ",", "response", ")", "data_store", ".", "REL_DB", ".", "WriteAPIAuditEntry", "(", "entry", ")" ]
create TxOut object
def tx_output ( network : str , value : Decimal , n : int , script : ScriptSig ) -> TxOut : network_params = net_query ( network ) return TxOut ( network = network_params , value = int ( value * network_params . to_unit ) , n = n , script_pubkey = script )
5,946
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/transactions.py#L259-L267
[ "def", "load", "(", "path", "=", "None", ",", "*", "*", "kwargs", ")", ":", "conn", "=", "__proxy__", "[", "'junos.conn'", "]", "(", ")", "ret", "=", "{", "}", "ret", "[", "'out'", "]", "=", "True", "if", "path", "is", "None", ":", "ret", "[", "'message'", "]", "=", "'Please provide the salt path where the configuration is present'", "ret", "[", "'out'", "]", "=", "False", "return", "ret", "op", "=", "{", "}", "if", "'__pub_arg'", "in", "kwargs", ":", "if", "kwargs", "[", "'__pub_arg'", "]", ":", "if", "isinstance", "(", "kwargs", "[", "'__pub_arg'", "]", "[", "-", "1", "]", ",", "dict", ")", ":", "op", ".", "update", "(", "kwargs", "[", "'__pub_arg'", "]", "[", "-", "1", "]", ")", "else", ":", "op", ".", "update", "(", "kwargs", ")", "template_vars", "=", "{", "}", "if", "\"template_vars\"", "in", "op", ":", "template_vars", "=", "op", "[", "\"template_vars\"", "]", "template_cached_path", "=", "salt", ".", "utils", ".", "files", ".", "mkstemp", "(", ")", "__salt__", "[", "'cp.get_template'", "]", "(", "path", ",", "template_cached_path", ",", "template_vars", "=", "template_vars", ")", "if", "not", "os", ".", "path", ".", "isfile", "(", "template_cached_path", ")", ":", "ret", "[", "'message'", "]", "=", "'Invalid file path.'", "ret", "[", "'out'", "]", "=", "False", "return", "ret", "if", "os", ".", "path", ".", "getsize", "(", "template_cached_path", ")", "==", "0", ":", "ret", "[", "'message'", "]", "=", "'Template failed to render'", "ret", "[", "'out'", "]", "=", "False", "return", "ret", "op", "[", "'path'", "]", "=", "template_cached_path", "if", "'format'", "not", "in", "op", ":", "if", "path", ".", "endswith", "(", "'set'", ")", ":", "template_format", "=", "'set'", "elif", "path", ".", "endswith", "(", "'xml'", ")", ":", "template_format", "=", "'xml'", "else", ":", "template_format", "=", "'text'", "op", "[", "'format'", "]", "=", "template_format", "if", "'replace'", "in", "op", "and", "op", "[", "'replace'", "]", ":", "op", "[", "'merge'", "]", 
"=", "False", "del", "op", "[", "'replace'", "]", "elif", "'overwrite'", "in", "op", "and", "op", "[", "'overwrite'", "]", ":", "op", "[", "'overwrite'", "]", "=", "True", "elif", "'overwrite'", "in", "op", "and", "not", "op", "[", "'overwrite'", "]", ":", "op", "[", "'merge'", "]", "=", "True", "del", "op", "[", "'overwrite'", "]", "try", ":", "conn", ".", "cu", ".", "load", "(", "*", "*", "op", ")", "ret", "[", "'message'", "]", "=", "\"Successfully loaded the configuration.\"", "except", "Exception", "as", "exception", ":", "ret", "[", "'message'", "]", "=", "'Could not load configuration due to : \"{0}\"'", ".", "format", "(", "exception", ")", "ret", "[", "'format'", "]", "=", "op", "[", "'format'", "]", "ret", "[", "'out'", "]", "=", "False", "return", "ret", "finally", ":", "salt", ".", "utils", ".", "files", ".", "safe_rm", "(", "template_cached_path", ")", "return", "ret" ]
create raw transaction
def make_raw_transaction ( network : str , inputs : list , outputs : list , locktime : Locktime , timestamp : int = int ( time ( ) ) , version : int = 1 , ) -> MutableTransaction : network_params = net_query ( network ) if network_params . name . startswith ( "peercoin" ) : return MutableTransaction ( version = version , ins = inputs , outs = outputs , locktime = locktime , network = network_params , timestamp = timestamp , ) return MutableTransaction ( version = version , ins = inputs , outs = outputs , locktime = locktime , network = network_params , )
5,947
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/transactions.py#L270-L298
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", 
"False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
due to design of the btcpy library TxIn object must be converted to TxOut object before signing
def find_parent_outputs ( provider : Provider , utxo : TxIn ) -> TxOut : network_params = net_query ( provider . network ) index = utxo . txout # utxo index return TxOut . from_json ( provider . getrawtransaction ( utxo . txid , 1 ) [ 'vout' ] [ index ] , network = network_params )
5,948
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/transactions.py#L301-L308
[ "def", "GetMostRecentClient", "(", "client_list", ",", "token", "=", "None", ")", ":", "last", "=", "rdfvalue", ".", "RDFDatetime", "(", "0", ")", "client_urn", "=", "None", "for", "client", "in", "aff4", ".", "FACTORY", ".", "MultiOpen", "(", "client_list", ",", "token", "=", "token", ")", ":", "client_last", "=", "client", ".", "Get", "(", "client", ".", "Schema", ".", "LAST", ")", "if", "client_last", ">", "last", ":", "last", "=", "client_last", "client_urn", "=", "client", ".", "urn", "return", "client_urn" ]
sign transaction with Kutil
def sign_transaction ( provider : Provider , unsigned : MutableTransaction , key : Kutil ) -> Transaction : parent_outputs = [ find_parent_outputs ( provider , i ) for i in unsigned . ins ] return key . sign_transaction ( parent_outputs , unsigned )
5,949
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/transactions.py#L311-L316
[ "def", "parse_requirements", "(", "fname", "=", "'requirements.txt'", ")", ":", "from", "os", ".", "path", "import", "dirname", ",", "join", ",", "exists", "import", "re", "require_fpath", "=", "join", "(", "dirname", "(", "__file__", ")", ",", "fname", ")", "# This breaks on pip install, so check that it exists.", "if", "exists", "(", "require_fpath", ")", ":", "with", "open", "(", "require_fpath", ",", "'r'", ")", "as", "f", ":", "packages", "=", "[", "]", "for", "line", "in", "f", ".", "readlines", "(", ")", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "line", "and", "not", "line", ".", "startswith", "(", "'#'", ")", ":", "if", "line", ".", "startswith", "(", "'-e '", ")", ":", "package", "=", "line", ".", "split", "(", "'#egg='", ")", "[", "1", "]", "packages", ".", "append", "(", "package", ")", "else", ":", "pat", "=", "'|'", ".", "join", "(", "[", "'>'", ",", "'>='", ",", "'=='", "]", ")", "package", "=", "re", ".", "split", "(", "pat", ",", "line", ")", "[", "0", "]", "packages", ".", "append", "(", "package", ")", "return", "packages", "return", "[", "]" ]
Changes Matplotlib basic style to produce high quality graphs . Call this function at the beginning of your script . You can even further improve graphs with a call to fix_style at the end of your script .
def set_style ( style = 'basic' , * * kwargs ) : style = _read_style ( style ) # Add basic style as the first style if style [ 0 ] != 'basic' : style = [ 'basic' ] + style # Apply all styles for s in style : _set_style ( s , * * kwargs )
5,950
https://github.com/erwanp/publib/blob/0417e6a31d52e23b816ac74d40b4c11d4b8ba4a6/publib/main.py#L89-L125
[ "def", "setup", "(", "self", ")", ":", "[", "c", ".", "start", "(", ")", "for", "c", "in", "self", ".", "controllers", "]", "[", "c", ".", "wait_to_start", "(", ")", "for", "c", "in", "self", ".", "controllers", "]" ]
Add an extra formatting layer to an axe that couldn t be changed directly in matplotlib . rcParams or with styles . Apply this function to every axe you created .
def fix_style ( style = 'basic' , ax = None , * * kwargs ) : style = _read_style ( style ) # Apply all styles for s in style : if not s in style_params . keys ( ) : avail = [ f . replace ( '.mplstyle' , '' ) for f in os . listdir ( _get_lib ( ) ) if f . endswith ( '.mplstyle' ) ] raise ValueError ( '{0} is not a valid style. ' . format ( s ) + 'Please pick a style from the list available in ' + '{0}: {1}' . format ( _get_lib ( ) , avail ) ) _fix_style ( style , ax , * * kwargs )
5,951
https://github.com/erwanp/publib/blob/0417e6a31d52e23b816ac74d40b4c11d4b8ba4a6/publib/main.py#L147-L192
[ "def", "_clean_workers", "(", "self", ")", ":", "while", "self", ".", "_bag_collector", ":", "self", ".", "_bag_collector", ".", "popleft", "(", ")", "self", ".", "_timer_worker_delete", ".", "stop", "(", ")" ]
Find the label for the output files for this calculation
def _get_label ( self ) : if self . _label is None : foundfiles = False for f in self . _files : if ".files" in f : foundfiles = True self . _label = f . split ( "." ) [ 0 ] with open ( self . _label + '.files' , 'r' ) as fp : line = fp . readline ( ) . split ( ) [ 0 ] if line != self . _label + ".in" : fp . close ( ) raise Exception ( 'first line must be label.in' ) line = fp . readline ( ) . split ( ) [ 0 ] if line != self . _label + ".txt" : fp . close ( ) raise Exception ( 'second line must be label.txt' ) line = fp . readline ( ) . split ( ) [ 0 ] if line != self . _label + "i" : fp . close ( ) raise Exception ( 'third line must be labeli' ) line = fp . readline ( ) . split ( ) [ 0 ] if line != self . _label + "o" : fp . close ( ) raise Exception ( 'fourth line must be labelo' ) fp . close ( ) if foundfiles : return self . _label else : raise Exception ( 'label.files not found' ) #ASE format # (self.prefix + '.in') # input # (self.prefix + '.txt')# output # (self.prefix + 'i') # input # (self.prefix + 'o') # output else : return self . _label
5,952
https://github.com/CitrineInformatics/pif-dft/blob/d5411dc1f6c6e8d454b132977ca7ab3bb8131a80/dfttopif/parsers/abinit.py#L30-L70
[ "def", "load_toml_rest_api_config", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "LOGGER", ".", "info", "(", "\"Skipping rest api loading from non-existent config file: %s\"", ",", "filename", ")", "return", "RestApiConfig", "(", ")", "LOGGER", ".", "info", "(", "\"Loading rest api information from config: %s\"", ",", "filename", ")", "try", ":", "with", "open", "(", "filename", ")", "as", "fd", ":", "raw_config", "=", "fd", ".", "read", "(", ")", "except", "IOError", "as", "e", ":", "raise", "RestApiConfigurationError", "(", "\"Unable to load rest api configuration file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "toml_config", "=", "toml", ".", "loads", "(", "raw_config", ")", "invalid_keys", "=", "set", "(", "toml_config", ".", "keys", "(", ")", ")", ".", "difference", "(", "[", "'bind'", ",", "'connect'", ",", "'timeout'", ",", "'opentsdb_db'", ",", "'opentsdb_url'", ",", "'opentsdb_username'", ",", "'opentsdb_password'", ",", "'client_max_size'", "]", ")", "if", "invalid_keys", ":", "raise", "RestApiConfigurationError", "(", "\"Invalid keys in rest api config: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted", "(", "list", "(", "invalid_keys", ")", ")", ")", ")", ")", "config", "=", "RestApiConfig", "(", "bind", "=", "toml_config", ".", "get", "(", "\"bind\"", ",", "None", ")", ",", "connect", "=", "toml_config", ".", "get", "(", "'connect'", ",", "None", ")", ",", "timeout", "=", "toml_config", ".", "get", "(", "'timeout'", ",", "None", ")", ",", "opentsdb_url", "=", "toml_config", ".", "get", "(", "'opentsdb_url'", ",", "None", ")", ",", "opentsdb_db", "=", "toml_config", ".", "get", "(", "'opentsdb_db'", ",", "None", ")", ",", "opentsdb_username", "=", "toml_config", ".", "get", "(", "'opentsdb_username'", ",", "None", ")", ",", "opentsdb_password", "=", "toml_config", ".", "get", "(", "'opentsdb_password'", ",", "None", ")", ",", "client_max_size", "=", "toml_config", 
".", "get", "(", "'client_max_size'", ",", "None", ")", ")", "return", "config" ]
Returns the player whose turn it will be next .
def next_player ( self ) : logging . warning ( 'turn={}, players={}' . format ( self . game . _cur_turn , self . game . players ) ) return self . game . players [ ( self . game . _cur_turn + 1 ) % len ( self . game . players ) ]
5,953
https://github.com/rosshamish/catan-py/blob/120438a8f16e39c13322c5d5930e1064e1d3f4be/catan/states.py#L184-L198
[ "def", "unmount", "(", "self", ")", ":", "self", ".", "unmount_bindmounts", "(", ")", "self", ".", "unmount_mounts", "(", ")", "self", ".", "unmount_volume_groups", "(", ")", "self", ".", "unmount_loopbacks", "(", ")", "self", ".", "unmount_base_images", "(", ")", "self", ".", "clean_dirs", "(", ")" ]
estimates the current progress that is then used in _receive_signal
def _estimate_progress ( self ) : estimate = True # ==== get the current subscript and the time it takes to execute it ===== current_subscript = self . _current_subscript_stage [ 'current_subscript' ] # ==== get the number of subscripts ===== num_subscripts = len ( self . scripts ) # ==== get number of iterations and loop index ====================== if self . iterator_type == 'loop' : num_iterations = self . settings [ 'num_loops' ] elif self . iterator_type == 'sweep' : sweep_range = self . settings [ 'sweep_range' ] if self . settings [ 'stepping_mode' ] == 'value_step' : num_iterations = int ( ( sweep_range [ 'max_value' ] - sweep_range [ 'min_value' ] ) / sweep_range [ 'N/value_step' ] ) + 1 # len(np.linspace(sweep_range['min_value'], sweep_range['max_value'], # (sweep_range['max_value'] - sweep_range['min_value']) / # sweep_range['N/value_step'] + 1, endpoint=True).tolist()) elif self . settings [ 'stepping_mode' ] == 'N' : num_iterations = sweep_range [ 'N/value_step' ] else : raise KeyError ( 'unknown key' + self . settings [ 'stepping_mode' ] ) else : print ( 'unknown iterator type in Iterator receive signal - can\'t estimate ramining time' ) estimate = False if estimate : # get number of loops (completed + 1) loop_index = self . loop_index if num_subscripts > 1 : # estimate the progress based on the duration the individual subscripts loop_execution_time = 0. # time for a single loop execution in s sub_progress_time = 0. # progress of current loop iteration in s # ==== get typical duration of current subscript ====================== if current_subscript is not None : current_subscript_exec_duration = self . _current_subscript_stage [ 'subscript_exec_duration' ] [ current_subscript . name ] . total_seconds ( ) else : current_subscript_exec_duration = 0.0 current_subscript_elapsed_time = ( datetime . datetime . now ( ) - current_subscript . start_time ) . 
total_seconds ( ) # estimate the duration of the current subscript if the script hasn't been executed once fully and subscript_exec_duration is 0 if current_subscript_exec_duration == 0.0 : remaining_time = current_subscript . remaining_time . total_seconds ( ) current_subscript_exec_duration = remaining_time + current_subscript_elapsed_time # ==== get typical duration of one loop iteration ====================== remaining_scripts = 0 # script that remain to be executed for the first time for subscript_name , duration in self . _current_subscript_stage [ 'subscript_exec_duration' ] . items ( ) : if duration . total_seconds ( ) == 0.0 : remaining_scripts += 1 loop_execution_time += duration . total_seconds ( ) # add the times of the subscripts that have been executed in the current loop # ignore the current subscript, because that will be taken care of later if self . _current_subscript_stage [ 'subscript_exec_count' ] [ subscript_name ] == loop_index and subscript_name is not current_subscript . name : # this subscript has already been executed in this iteration sub_progress_time += duration . total_seconds ( ) # add the proportional duration of the current subscript given by the subscript progress sub_progress_time += current_subscript_elapsed_time # if there are scripts that have not been executed yet # assume that all the scripts that have not been executed yet take as long as the average of the other scripts if remaining_scripts == num_subscripts : # none of the subscript has been finished. assume that all the scripts take as long as the first loop_execution_time = num_subscripts * current_subscript_exec_duration elif remaining_scripts > 1 : loop_execution_time = 1. * num_subscripts / ( num_subscripts - remaining_scripts ) elif remaining_scripts == 1 : # there is only one script left which is the current script loop_execution_time += current_subscript_exec_duration if loop_execution_time > 0 : progress_subscript = 100. 
* sub_progress_time / loop_execution_time else : progress_subscript = 1. * progress_subscript / num_subscripts # print(' === script iterator progress estimation loop_index = {:d}/{:d}, progress_subscript = {:f}'.format(loop_index, number_of_iterations, progress_subscript)) progress = 100. * ( loop_index - 1. + 0.01 * progress_subscript ) / num_iterations else : # if can't estimate the remaining time set to half progress = 50 return progress
5,954
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/script_iterator.py#L242-L338
[ "def", "remove_volume", "(", "self", ",", "volume_name", ")", ":", "logger", ".", "info", "(", "\"removing volume '%s'\"", ",", "volume_name", ")", "try", ":", "self", ".", "d", ".", "remove_volume", "(", "volume_name", ")", "except", "APIError", "as", "ex", ":", "if", "ex", ".", "response", ".", "status_code", "==", "requests", ".", "codes", ".", "CONFLICT", ":", "logger", ".", "debug", "(", "\"ignoring a conflict when removing volume %s\"", ",", "volume_name", ")", "else", ":", "raise", "ex" ]
When each subscript is called uses its standard plotting
def plot ( self , figure_list ) : #TODO: be smarter about how we plot ScriptIterator if self . _current_subscript_stage is not None : if self . _current_subscript_stage [ 'current_subscript' ] is not None : self . _current_subscript_stage [ 'current_subscript' ] . plot ( figure_list ) if ( self . is_running is False ) and not ( self . data == { } or self . data is None ) : script_names = list ( self . settings [ 'script_order' ] . keys ( ) ) script_indices = [ self . settings [ 'script_order' ] [ name ] for name in script_names ] _ , sorted_script_names = list ( zip ( * sorted ( zip ( script_indices , script_names ) ) ) ) last_script = self . scripts [ sorted_script_names [ - 1 ] ] last_script . force_update ( ) # since we use the last script plot function we force it to refresh axes_list = last_script . get_axes_layout ( figure_list ) # catch error is _plot function doens't take optional data argument try : last_script . _plot ( axes_list , self . data ) except TypeError as err : print ( ( warnings . warn ( 'can\'t plot average script data because script.plot function doens\'t take data as optional argument. Plotting last data set instead' ) ) ) print ( ( err . message ) ) last_script . plot ( figure_list )
5,955
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/script_iterator.py#L360-L392
[ "def", "calculateOverlapCurve", "(", "sp", ",", "inputVectors", ")", ":", "columnNumber", "=", "np", ".", "prod", "(", "sp", ".", "getColumnDimensions", "(", ")", ")", "numInputVector", ",", "inputSize", "=", "inputVectors", ".", "shape", "outputColumns", "=", "np", ".", "zeros", "(", "(", "numInputVector", ",", "columnNumber", ")", ",", "dtype", "=", "uintType", ")", "outputColumnsCorrupted", "=", "np", ".", "zeros", "(", "(", "numInputVector", ",", "columnNumber", ")", ",", "dtype", "=", "uintType", ")", "noiseLevelList", "=", "np", ".", "linspace", "(", "0", ",", "1.0", ",", "21", ")", "inputOverlapScore", "=", "np", ".", "zeros", "(", "(", "numInputVector", ",", "len", "(", "noiseLevelList", ")", ")", ")", "outputOverlapScore", "=", "np", ".", "zeros", "(", "(", "numInputVector", ",", "len", "(", "noiseLevelList", ")", ")", ")", "for", "i", "in", "range", "(", "numInputVector", ")", ":", "for", "j", "in", "range", "(", "len", "(", "noiseLevelList", ")", ")", ":", "inputVectorCorrupted", "=", "copy", ".", "deepcopy", "(", "inputVectors", "[", "i", "]", "[", ":", "]", ")", "corruptSparseVector", "(", "inputVectorCorrupted", ",", "noiseLevelList", "[", "j", "]", ")", "sp", ".", "compute", "(", "inputVectors", "[", "i", "]", "[", ":", "]", ",", "False", ",", "outputColumns", "[", "i", "]", "[", ":", "]", ")", "sp", ".", "compute", "(", "inputVectorCorrupted", ",", "False", ",", "outputColumnsCorrupted", "[", "i", "]", "[", ":", "]", ")", "inputOverlapScore", "[", "i", "]", "[", "j", "]", "=", "percentOverlap", "(", "inputVectors", "[", "i", "]", "[", ":", "]", ",", "inputVectorCorrupted", ")", "outputOverlapScore", "[", "i", "]", "[", "j", "]", "=", "percentOverlap", "(", "outputColumns", "[", "i", "]", "[", ":", "]", ",", "outputColumnsCorrupted", "[", "i", "]", "[", ":", "]", ")", "return", "noiseLevelList", ",", "inputOverlapScore", ",", "outputOverlapScore" ]
assigning the actual script settings depending on the iterator type
def get_default_settings ( sub_scripts , script_order , script_execution_freq , iterator_type ) : def populate_sweep_param ( scripts , parameter_list , trace = '' ) : ''' Args: scripts: a dict of {'class name': <class object>} pairs Returns: A list of all parameters of the input scripts ''' def get_parameter_from_dict ( trace , dic , parameter_list , valid_values = None ) : """ appends keys in the dict to a list in the form trace.key.subkey.subsubkey... Args: trace: initial prefix (path through scripts and parameters to current location) dic: dictionary parameter_list: list to which append the parameters valid_values: valid values of dictionary values if None dic should be a dictionary Returns: """ if valid_values is None and isinstance ( dic , Parameter ) : valid_values = dic . valid_values for key , value in dic . items ( ) : if isinstance ( value , dict ) : # for nested parameters ex {point: {'x': int, 'y': int}} parameter_list = get_parameter_from_dict ( trace + '.' + key , value , parameter_list , dic . valid_values [ key ] ) elif ( valid_values [ key ] in ( float , int ) ) or ( isinstance ( valid_values [ key ] , list ) and valid_values [ key ] [ 0 ] in ( float , int ) ) : parameter_list . append ( trace + '.' + key ) else : # once down to the form {key: value} # in all other cases ignore parameter print ( ( 'ignoring sweep parameter' , key ) ) return parameter_list for script_name in list ( scripts . keys ( ) ) : from pylabcontrol . 
core import ScriptIterator script_trace = trace if script_trace == '' : script_trace = script_name else : script_trace = script_trace + '->' + script_name if issubclass ( scripts [ script_name ] , ScriptIterator ) : # gets subscripts of ScriptIterator objects populate_sweep_param ( vars ( scripts [ script_name ] ) [ '_SCRIPTS' ] , parameter_list = parameter_list , trace = script_trace ) else : # use inspect instead of vars to get _DEFAULT_SETTINGS also for classes that inherit _DEFAULT_SETTINGS from a superclass for setting in [ elem [ 1 ] for elem in inspect . getmembers ( scripts [ script_name ] ) if elem [ 0 ] == '_DEFAULT_SETTINGS' ] [ 0 ] : parameter_list = get_parameter_from_dict ( script_trace , setting , parameter_list ) return parameter_list if iterator_type == 'loop' : script_default_settings = [ Parameter ( 'script_order' , script_order ) , Parameter ( 'script_execution_freq' , script_execution_freq ) , Parameter ( 'num_loops' , 0 , int , 'times the subscripts will be executed' ) , Parameter ( 'run_all_first' , True , bool , 'Run all scripts with nonzero frequency in first pass' ) ] elif iterator_type == 'sweep' : sweep_params = populate_sweep_param ( sub_scripts , [ ] ) script_default_settings = [ Parameter ( 'script_order' , script_order ) , Parameter ( 'script_execution_freq' , script_execution_freq ) , Parameter ( 'sweep_param' , sweep_params [ 0 ] , sweep_params , 'variable over which to sweep' ) , Parameter ( 'sweep_range' , [ Parameter ( 'min_value' , 0 , float , 'min parameter value' ) , Parameter ( 'max_value' , 0 , float , 'max parameter value' ) , Parameter ( 'N/value_step' , 0 , float , 'either number of steps or parameter value step, depending on mode' ) ] ) , Parameter ( 'stepping_mode' , 'N' , [ 'N' , 'value_step' ] , 'Switch between number of steps and step amount' ) , Parameter ( 'run_all_first' , True , bool , 'Run all scripts with nonzero frequency in first pass' ) ] else : print ( ( 'unknown iterator type ' + iterator_type ) ) raise 
TypeError ( 'unknown iterator type ' + iterator_type ) return script_default_settings
5,956
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/core/script_iterator.py#L451-L552
[ "def", "renew_compose", "(", "self", ",", "compose_id", ")", ":", "logger", ".", "info", "(", "\"Renewing compose %d\"", ",", "compose_id", ")", "response", "=", "self", ".", "session", ".", "patch", "(", "'{}composes/{}'", ".", "format", "(", "self", ".", "url", ",", "compose_id", ")", ")", "response", ".", "raise_for_status", "(", ")", "response_json", "=", "response", ".", "json", "(", ")", "compose_id", "=", "response_json", "[", "'id'", "]", "logger", ".", "info", "(", "\"Renewed compose is %d\"", ",", "compose_id", ")", "return", "response_json" ]
Convert raster to ESRI shapefile
def raster2shp ( rasterfile , vectorshp , layername = None , fieldname = None , band_num = 1 , mask = 'default' ) : FileClass . remove_files ( vectorshp ) FileClass . check_file_exists ( rasterfile ) # this allows GDAL to throw Python Exceptions gdal . UseExceptions ( ) src_ds = gdal . Open ( rasterfile ) if src_ds is None : print ( 'Unable to open %s' % rasterfile ) sys . exit ( 1 ) try : srcband = src_ds . GetRasterBand ( band_num ) except RuntimeError as e : # for example, try GetRasterBand(10) print ( 'Band ( %i ) not found, %s' % ( band_num , e ) ) sys . exit ( 1 ) if mask == 'default' : maskband = srcband . GetMaskBand ( ) elif mask is None or mask . upper ( ) == 'NONE' : maskband = None else : mask_ds = gdal . Open ( mask ) maskband = mask_ds . GetRasterBand ( 1 ) # create output datasource if layername is None : layername = FileClass . get_core_name_without_suffix ( rasterfile ) drv = ogr_GetDriverByName ( str ( 'ESRI Shapefile' ) ) dst_ds = drv . CreateDataSource ( vectorshp ) srs = None if src_ds . GetProjection ( ) != '' : srs = osr_SpatialReference ( ) srs . ImportFromWkt ( src_ds . GetProjection ( ) ) dst_layer = dst_ds . CreateLayer ( str ( layername ) , srs = srs ) if fieldname is None : fieldname = layername . upper ( ) fd = ogr_FieldDefn ( str ( fieldname ) , OFTInteger ) dst_layer . CreateField ( fd ) dst_field = 0 result = gdal . Polygonize ( srcband , maskband , dst_layer , dst_field , [ '8CONNECTED=8' ] , callback = None ) return result
5,957
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/vector.py#L36-L77
[ "def", "parse_topic", "(", "self", ",", "params", ",", "region", ",", "topic", ")", ":", "topic", "[", "'arn'", "]", "=", "topic", ".", "pop", "(", "'TopicArn'", ")", "topic", "[", "'name'", "]", "=", "topic", "[", "'arn'", "]", ".", "split", "(", "':'", ")", "[", "-", "1", "]", "(", "prefix", ",", "partition", ",", "service", ",", "region", ",", "account", ",", "name", ")", "=", "topic", "[", "'arn'", "]", ".", "split", "(", "':'", ")", "api_client", "=", "api_clients", "[", "region", "]", "attributes", "=", "api_client", ".", "get_topic_attributes", "(", "TopicArn", "=", "topic", "[", "'arn'", "]", ")", "[", "'Attributes'", "]", "for", "k", "in", "[", "'Owner'", ",", "'DisplayName'", "]", ":", "topic", "[", "k", "]", "=", "attributes", "[", "k", "]", "if", "k", "in", "attributes", "else", "None", "for", "k", "in", "[", "'Policy'", ",", "'DeliveryPolicy'", ",", "'EffectiveDeliveryPolicy'", "]", ":", "topic", "[", "k", "]", "=", "json", ".", "loads", "(", "attributes", "[", "k", "]", ")", "if", "k", "in", "attributes", "else", "None", "topic", "[", "'name'", "]", "=", "topic", "[", "'arn'", "]", ".", "split", "(", "':'", ")", "[", "-", "1", "]", "manage_dictionary", "(", "topic", ",", "'subscriptions'", ",", "{", "}", ")", "manage_dictionary", "(", "topic", ",", "'subscriptions_count'", ",", "0", ")", "self", ".", "topics", "[", "topic", "[", "'name'", "]", "]", "=", "topic" ]
convert shapefile to geojson file
def convert2geojson ( jsonfile , src_srs , dst_srs , src_file ) : if os . path . exists ( jsonfile ) : os . remove ( jsonfile ) if sysstr == 'Windows' : exepath = '"%s/Lib/site-packages/osgeo/ogr2ogr"' % sys . exec_prefix else : exepath = FileClass . get_executable_fullpath ( 'ogr2ogr' ) # os.system(s) s = '%s -f GeoJSON -s_srs "%s" -t_srs %s %s %s' % ( exepath , src_srs , dst_srs , jsonfile , src_file ) UtilClass . run_command ( s )
5,958
https://github.com/lreis2415/PyGeoC/blob/9a92d1a229bb74298e3c57f27c97079980b5f729/pygeoc/vector.py#L80-L91
[ "def", "get_records", "(", "self", ")", ":", "form", "=", "self", ".", "request", ".", "form", "ar_count", "=", "self", ".", "get_ar_count", "(", ")", "records", "=", "[", "]", "# Group belonging AR fields together", "for", "arnum", "in", "range", "(", "ar_count", ")", ":", "record", "=", "{", "}", "s1", "=", "\"-{}\"", ".", "format", "(", "arnum", ")", "keys", "=", "filter", "(", "lambda", "key", ":", "s1", "in", "key", ",", "form", ".", "keys", "(", ")", ")", "for", "key", "in", "keys", ":", "new_key", "=", "key", ".", "replace", "(", "s1", ",", "\"\"", ")", "value", "=", "form", ".", "get", "(", "key", ")", "record", "[", "new_key", "]", "=", "value", "records", ".", "append", "(", "record", ")", "return", "records" ]
Get the consensus of an alignment as a string .
def consensus ( aln , weights = None , gap_threshold = 0.5 , simple = False , trim_ends = True ) : # Choose your algorithms! if simple : # Use the simple, unweighted algorithm col_consensus = make_simple_col_consensus ( alnutils . aa_frequencies ( aln ) ) def is_majority_gap ( col ) : return ( float ( col . count ( '-' ) ) / len ( col ) >= gap_threshold ) # ENH (alternatively/additionally): does any aa occur more than once? # ENH: choose gap-decisionmaking separately from col_consensus else : # Use the entropy-based, weighted algorithm if weights is None : seq_weights = alnutils . sequence_weights ( aln , 'avg1' ) else : seq_weights = weights aa_frequencies = alnutils . aa_frequencies ( aln , weights = seq_weights ) col_consensus = make_entropy_col_consensus ( aa_frequencies ) def is_majority_gap ( col ) : gap_count = 0.0 for wt , char in zip ( seq_weights , col ) : if char == '-' : gap_count += wt return ( gap_count / sum ( seq_weights ) >= gap_threshold ) # Traverse the alignment, handling gaps etc. def col_wise_consensus ( columns ) : """Calculate the consensus chars for an iterable of columns.""" if not trim_ends : # Track if we're in the N-term or C-term end of the sequence in_left_end = True maybe_right_tail = [ ] # prev_col = None # prev_char = None for col in columns : # Lowercase cols mean explicitly, "don't include in consensus" if all ( c . islower ( ) for c in col if c not in '.-' ) : yield '-' continue if any ( c . islower ( ) for c in col ) : logging . warn ( 'Mixed lowercase and uppercase letters in a ' 'column: ' + '' . join ( col ) ) col = map ( str . upper , col ) # Gap chars is_gap = is_majority_gap ( col ) if not trim_ends : # Avoid N-terminal gaps in the consensus sequence if in_left_end : if not is_gap : # Match -- we're no longer in the left end in_left_end = False is_gap = False # When to yield a gap here: # ----------- --------- ------ ---------- # in_left_end trim_ends is_gap yield gap? 
# ----------- --------- ------ ---------- # True True (True) yes # True False (False) (no -- def. char) # False True T/F yes, if is_gap # False False (T/F) NO! use maybe_right_tail # ----------- --------- ------ ---------- if is_gap and trim_ends : yield '-' continue # Get the consensus character, using the chosen algorithm cons_char = col_consensus ( col ) if trim_ends : yield cons_char else : # Avoid C-terminal gaps in the consensus sequence if is_gap : maybe_right_tail . append ( cons_char ) else : # Match -> gaps weren't the right tail; emit all gaps for char in maybe_right_tail : yield '-' maybe_right_tail = [ ] yield cons_char # prev_col = col # prev_char = cons_char # Finally, if we were keeping a right (C-term) tail, emit it if not trim_ends : for char in maybe_right_tail : yield char return '' . join ( col_wise_consensus ( zip ( * aln ) ) )
5,959
https://github.com/etal/biofrills/blob/36684bb6c7632f96215e8b2b4ebc86640f331bcd/biofrills/consensus.py#L10-L130
[ "def", "delete_datapoint", "(", "self", ",", "datapoint", ")", ":", "datapoint", "=", "validate_type", "(", "datapoint", ",", "DataPoint", ")", "self", ".", "_conn", ".", "delete", "(", "\"/ws/DataPoint/{stream_id}/{datapoint_id}\"", ".", "format", "(", "stream_id", "=", "self", ".", "get_stream_id", "(", ")", ",", "datapoint_id", "=", "datapoint", ".", "get_id", "(", ")", ",", ")", ")" ]
Consensus by simple plurality unweighted .
def make_simple_col_consensus ( bg_freqs ) : # Hack: use default kwargs to persist across iterations def col_consensus ( col , prev_col = [ ] , prev_char = [ ] ) : # Count the amino acid types in this column aa_counts = sequtils . aa_frequencies ( col ) assert aa_counts , "Column is all gaps! That's not allowed." # Take the most common residue(s) best_char , best_score = max ( aa_counts . iteritems ( ) , key = lambda kv : kv [ 1 ] ) # Resolve ties ties = [ aa for aa in aa_counts if aa_counts [ aa ] == best_score ] if len ( ties ) > 1 : # Breaker #1: most common after the prev. consensus char # Resolve a tied col by restricting to rows where the preceding # char is the consensus type for that (preceding) col if prev_char and prev_col : mc_next = Counter ( [ b for a , b in zip ( prev_col , col ) if a == prev_char [ 0 ] and b in ties ] ) . most_common ( ) ties_next = [ x [ 0 ] for x in mc_next if x [ 1 ] == mc_next [ 0 ] [ 1 ] ] if ties_next : ties = ties_next if len ( ties ) > 1 : # Breaker #2: lowest overall residue frequency ties . sort ( key = lambda aa : bg_freqs [ aa ] ) best_char = ties [ 0 ] else : assert best_char == ties [ 0 ] , 'WTF %s != %s[0]' % ( best_char , ties ) # Save values for tie-breaker #1 prev_col [ : ] = col prev_char [ : ] = best_char return best_char return col_consensus
5,960
https://github.com/etal/biofrills/blob/36684bb6c7632f96215e8b2b4ebc86640f331bcd/biofrills/consensus.py#L166-L209
[ "def", "customer_discount_webhook_handler", "(", "event", ")", ":", "crud_type", "=", "CrudType", ".", "determine", "(", "event", "=", "event", ")", "discount_data", "=", "event", ".", "data", ".", "get", "(", "\"object\"", ",", "{", "}", ")", "coupon_data", "=", "discount_data", ".", "get", "(", "\"coupon\"", ",", "{", "}", ")", "customer", "=", "event", ".", "customer", "if", "crud_type", ".", "created", "or", "crud_type", ".", "updated", ":", "coupon", ",", "_", "=", "_handle_crud_like_event", "(", "target_cls", "=", "models", ".", "Coupon", ",", "event", "=", "event", ",", "data", "=", "coupon_data", ",", "id", "=", "coupon_data", ".", "get", "(", "\"id\"", ")", ")", "coupon_start", "=", "discount_data", ".", "get", "(", "\"start\"", ")", "coupon_end", "=", "discount_data", ".", "get", "(", "\"end\"", ")", "else", ":", "coupon", "=", "None", "coupon_start", "=", "None", "coupon_end", "=", "None", "customer", ".", "coupon", "=", "coupon", "customer", ".", "coupon_start", "=", "convert_tstamp", "(", "coupon_start", ")", "customer", ".", "coupon_end", "=", "convert_tstamp", "(", "coupon_end", ")", "customer", ".", "save", "(", ")" ]
Get only the supported consensus residues in each column .
def supported ( aln ) : def col_consensus ( columns ) : """Calculate the consensus chars for an iterable of columns.""" for col in columns : if ( # Majority gap chars ( col . count ( '-' ) >= len ( col ) / 2 ) or # Lowercase cols mean "don't include in consensus" all ( c . islower ( ) for c in col if c not in '.-' ) ) : yield '-' continue # Validation - copied from consensus() above if any ( c . islower ( ) for c in col ) : logging . warn ( 'Mixed lowercase and uppercase letters in a ' 'column: ' + '' . join ( col ) ) col = map ( str . upper , col ) # Calculate the consensus character most_common = Counter ( [ c for c in col if c not in '-' ] ) . most_common ( ) if not most_common : # XXX ever reached? logging . warn ( "Column is all gaps! How did that happen?" ) if most_common [ 0 ] [ 1 ] == 1 : # No char has frequency > 1; no consensus char yield '-' elif ( len ( most_common ) > 1 and most_common [ 0 ] [ 1 ] == most_common [ 1 ] [ 1 ] ) : # Tie for most-common residue type ties = [ x [ 0 ] for x in most_common if x [ 1 ] == most_common [ 0 ] [ 1 ] ] yield '' . join ( ties ) else : yield most_common [ 0 ] [ 0 ] return list ( col_consensus ( zip ( * aln ) ) )
5,961
https://github.com/etal/biofrills/blob/36684bb6c7632f96215e8b2b4ebc86640f331bcd/biofrills/consensus.py#L215-L261
[ "def", "logout", "(", "self", ")", ":", "# Check if all transfers are complete before logout", "self", ".", "transfers_complete", "payload", "=", "{", "'apikey'", ":", "self", ".", "config", ".", "get", "(", "'apikey'", ")", ",", "'logintoken'", ":", "self", ".", "session", ".", "cookies", ".", "get", "(", "'logintoken'", ")", "}", "method", ",", "url", "=", "get_URL", "(", "'logout'", ")", "res", "=", "getattr", "(", "self", ".", "session", ",", "method", ")", "(", "url", ",", "params", "=", "payload", ")", "if", "res", ".", "status_code", "==", "200", ":", "self", ".", "session", ".", "cookies", "[", "'logintoken'", "]", "=", "None", "return", "True", "hellraiser", "(", "res", ")" ]
Helper function detects a certain clause in tag tokens list . Returns its value .
def detect_clause ( parser , clause_name , tokens , as_filter_expr = True ) : if clause_name in tokens : t_index = tokens . index ( clause_name ) clause_value = tokens [ t_index + 1 ] if as_filter_expr : clause_value = parser . compile_filter ( clause_value ) del tokens [ t_index : t_index + 2 ] else : clause_value = None return clause_value
5,962
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/templatetags/sitecats.py#L102-L115
[ "def", "diff_aff", "(", "self", ")", ":", "row_degrees", "=", "np", ".", "array", "(", "self", ".", "kernel", ".", "sum", "(", "axis", "=", "1", ")", ")", ".", "reshape", "(", "-", "1", ",", "1", ")", "col_degrees", "=", "np", ".", "array", "(", "self", ".", "kernel", ".", "sum", "(", "axis", "=", "0", ")", ")", ".", "reshape", "(", "1", ",", "-", "1", ")", "if", "sparse", ".", "issparse", "(", "self", ".", "kernel", ")", ":", "return", "self", ".", "kernel", ".", "multiply", "(", "1", "/", "np", ".", "sqrt", "(", "row_degrees", ")", ")", ".", "multiply", "(", "1", "/", "np", ".", "sqrt", "(", "col_degrees", ")", ")", "else", ":", "return", "(", "self", ".", "kernel", "/", "np", ".", "sqrt", "(", "row_degrees", ")", ")", "/", "np", ".", "sqrt", "(", "col_degrees", ")" ]
install an IPython > = 3 . 0 kernelspec that loads corral env
def install_kernel_spec ( self , app , dir_name , display_name , settings_module , ipython_arguments ) : ksm = app . kernel_spec_manager try_spec_names = [ 'python3' if six . PY3 else 'python2' , 'python' ] if isinstance ( try_spec_names , six . string_types ) : try_spec_names = [ try_spec_names ] ks = None for spec_name in try_spec_names : try : ks = ksm . get_kernel_spec ( spec_name ) break except Exception : continue if not ks : self . parser . error ( "No notebook (Python) kernel specs found" ) ks . display_name = display_name ks . env [ "CORRAL_SETTINGS_MODULE" ] = settings_module ks . argv . extend ( ipython_arguments ) in_corral_dir , in_corral = os . path . split ( os . path . realpath ( sys . argv [ 0 ] ) ) pythonpath = ks . env . get ( 'PYTHONPATH' , os . environ . get ( 'PYTHONPATH' , '' ) ) pythonpath = pythonpath . split ( ':' ) if in_corral_dir not in pythonpath : pythonpath . append ( in_corral_dir ) ks . env [ 'PYTHONPATH' ] = ':' . join ( filter ( None , pythonpath ) ) kernel_dir = os . path . join ( ksm . user_kernel_dir , conf . PACKAGE ) if not os . path . exists ( kernel_dir ) : os . makedirs ( kernel_dir ) shutil . copy ( res . fullpath ( "logo-64x64.png" ) , kernel_dir ) with open ( os . path . join ( kernel_dir , 'kernel.json' ) , 'w' ) as f : f . write ( ks . to_json ( ) )
5,963
https://github.com/toros-astro/corral/blob/75474b38ff366330d33644461a902d07374a5bbc/corral/cli/commands.py#L250-L289
[ "def", "_get_videoname", "(", "cls", ",", "videofile", ")", ":", "name", "=", "os", ".", "path", ".", "basename", "(", "videofile", ")", "name", "=", "os", ".", "path", ".", "splitext", "(", "name", ")", "[", "0", "]", "return", "name" ]
Initializes local cache from Django cache if required .
def _cache_init ( self ) : cache_ = cache . get ( self . CACHE_ENTRY_NAME ) if cache_ is None : categories = get_category_model ( ) . objects . order_by ( 'sort_order' ) ids = { category . id : category for category in categories } aliases = { category . alias : category for category in categories if category . alias } parent_to_children = OrderedDict ( ) # Preserve aliases order. for category in categories : parent_category = ids . get ( category . parent_id , False ) parent_alias = None if parent_category : parent_alias = parent_category . alias if parent_alias not in parent_to_children : parent_to_children [ parent_alias ] = [ ] parent_to_children [ parent_alias ] . append ( category . id ) cache_ = { self . CACHE_NAME_IDS : ids , self . CACHE_NAME_PARENTS : parent_to_children , self . CACHE_NAME_ALIASES : aliases } cache . set ( self . CACHE_ENTRY_NAME , cache_ , self . CACHE_TIMEOUT ) self . _cache = cache_
5,964
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/utils.py#L57-L85
[ "def", "SetConsoleTextAttribute", "(", "stream_id", ",", "attrs", ")", ":", "handle", "=", "handles", "[", "stream_id", "]", "return", "windll", ".", "kernel32", ".", "SetConsoleTextAttribute", "(", "handle", ",", "attrs", ")" ]
Returns cache entry parameter value by its name .
def _cache_get_entry ( self , entry_name , key = ENTIRE_ENTRY_KEY , default = False ) : if key is self . ENTIRE_ENTRY_KEY : return self . _cache [ entry_name ] return self . _cache [ entry_name ] . get ( key , default )
5,965
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/utils.py#L94-L104
[ "def", "compress_pdf", "(", "pdf_fpath", ",", "output_fname", "=", "None", ")", ":", "import", "utool", "as", "ut", "ut", ".", "assertpath", "(", "pdf_fpath", ")", "suffix", "=", "'_'", "+", "ut", ".", "get_datestamp", "(", "False", ")", "+", "'_compressed'", "print", "(", "'pdf_fpath = %r'", "%", "(", "pdf_fpath", ",", ")", ")", "output_pdf_fpath", "=", "ut", ".", "augpath", "(", "pdf_fpath", ",", "suffix", ",", "newfname", "=", "output_fname", ")", "print", "(", "'output_pdf_fpath = %r'", "%", "(", "output_pdf_fpath", ",", ")", ")", "gs_exe", "=", "find_ghostscript_exe", "(", ")", "cmd_list", "=", "(", "gs_exe", ",", "'-sDEVICE=pdfwrite'", ",", "'-dCompatibilityLevel=1.4'", ",", "'-dNOPAUSE'", ",", "'-dQUIET'", ",", "'-dBATCH'", ",", "'-sOutputFile='", "+", "output_pdf_fpath", ",", "pdf_fpath", ")", "ut", ".", "cmd", "(", "*", "cmd_list", ")", "return", "output_pdf_fpath" ]
Sorts the given aliases list returns a sorted list .
def sort_aliases ( self , aliases ) : self . _cache_init ( ) if not aliases : return aliases parent_aliases = self . _cache_get_entry ( self . CACHE_NAME_PARENTS ) . keys ( ) return [ parent_alias for parent_alias in parent_aliases if parent_alias in aliases ]
5,966
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/utils.py#L106-L116
[ "def", "read_chd_header", "(", "chd_file", ")", ":", "with", "open", "(", "chd_file", ",", "\"rb\"", ")", "as", "f", ":", "header", "=", "{", "\"cinefileheader\"", ":", "cine", ".", "CINEFILEHEADER", "(", ")", ",", "\"bitmapinfoheader\"", ":", "cine", ".", "BITMAPINFOHEADER", "(", ")", ",", "\"setup\"", ":", "cine", ".", "SETUP", "(", ")", ",", "}", "f", ".", "readinto", "(", "header", "[", "\"cinefileheader\"", "]", ")", "f", ".", "readinto", "(", "header", "[", "\"bitmapinfoheader\"", "]", ")", "f", ".", "readinto", "(", "header", "[", "\"setup\"", "]", ")", "return", "header" ]
Returns parent aliases for a list of child IDs .
def get_parents_for ( self , child_ids ) : self . _cache_init ( ) parent_candidates = [ ] for parent , children in self . _cache_get_entry ( self . CACHE_NAME_PARENTS ) . items ( ) : if set ( children ) . intersection ( child_ids ) : parent_candidates . append ( parent ) return set ( parent_candidates )
5,967
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/utils.py#L118-L130
[ "def", "check_sync", "(", "self", ")", ":", "# If refresh interval is not specified, we should refresh every time.", "expiration", "=", "utcnow", "(", ")", "refresh_td", "=", "current_app", ".", "config", ".", "get", "(", "'GITHUB_REFRESH_TIMEDELTA'", ")", "if", "refresh_td", ":", "expiration", "-=", "refresh_td", "last_sync", "=", "parse_timestamp", "(", "self", ".", "account", ".", "extra_data", "[", "'last_sync'", "]", ")", "return", "last_sync", "<", "expiration" ]
Returns a list with with categories under the given parent .
def get_children_for ( self , parent_alias = None , only_with_aliases = False ) : self . _cache_init ( ) child_ids = self . get_child_ids ( parent_alias ) if only_with_aliases : children = [ ] for cid in child_ids : category = self . get_category_by_id ( cid ) if category . alias : children . append ( category ) return children return [ self . get_category_by_id ( cid ) for cid in child_ids ]
5,968
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/utils.py#L132-L148
[ "def", "delta", "(", "f", ",", "s", ",", "d", "=", "None", ")", ":", "if", "d", "is", "None", ":", "d", "=", "tempfile", ".", "SpooledTemporaryFile", "(", "max_size", "=", "MAX_SPOOL", ",", "mode", "=", "'wb+'", ")", "sig", "=", "ctypes", ".", "c_void_p", "(", ")", "try", ":", "job", "=", "_librsync", ".", "rs_loadsig_begin", "(", "ctypes", ".", "byref", "(", "sig", ")", ")", "try", ":", "_execute", "(", "job", ",", "s", ")", "finally", ":", "_librsync", ".", "rs_job_free", "(", "job", ")", "r", "=", "_librsync", ".", "rs_build_hash_table", "(", "sig", ")", "if", "r", "!=", "RS_DONE", ":", "raise", "LibrsyncError", "(", "r", ")", "job", "=", "_librsync", ".", "rs_delta_begin", "(", "sig", ")", "try", ":", "_execute", "(", "job", ",", "f", ",", "d", ")", "finally", ":", "_librsync", ".", "rs_job_free", "(", "job", ")", "finally", ":", "_librsync", ".", "rs_free_sumset", "(", "sig", ")", "return", "d" ]
Returns child IDs of the given parent category
def get_child_ids ( self , parent_alias ) : self . _cache_init ( ) return self . _cache_get_entry ( self . CACHE_NAME_PARENTS , parent_alias , [ ] )
5,969
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/utils.py#L150-L158
[ "def", "create_token_response", "(", "self", ",", "request", ",", "token_handler", ")", ":", "headers", "=", "self", ".", "_get_default_headers", "(", ")", "try", ":", "if", "self", ".", "request_validator", ".", "client_authentication_required", "(", "request", ")", ":", "log", ".", "debug", "(", "'Authenticating client, %r.'", ",", "request", ")", "if", "not", "self", ".", "request_validator", ".", "authenticate_client", "(", "request", ")", ":", "log", ".", "debug", "(", "'Client authentication failed, %r.'", ",", "request", ")", "raise", "errors", ".", "InvalidClientError", "(", "request", "=", "request", ")", "elif", "not", "self", ".", "request_validator", ".", "authenticate_client_id", "(", "request", ".", "client_id", ",", "request", ")", ":", "log", ".", "debug", "(", "'Client authentication failed, %r.'", ",", "request", ")", "raise", "errors", ".", "InvalidClientError", "(", "request", "=", "request", ")", "log", ".", "debug", "(", "'Validating access token request, %r.'", ",", "request", ")", "self", ".", "validate_token_request", "(", "request", ")", "except", "errors", ".", "OAuth2Error", "as", "e", ":", "log", ".", "debug", "(", "'Client error in token request, %s.'", ",", "e", ")", "headers", ".", "update", "(", "e", ".", "headers", ")", "return", "headers", ",", "e", ".", "json", ",", "e", ".", "status_code", "token", "=", "token_handler", ".", "create_token", "(", "request", ",", "self", ".", "refresh_token", ")", "for", "modifier", "in", "self", ".", "_token_modifiers", ":", "token", "=", "modifier", "(", "token", ")", "self", ".", "request_validator", ".", "save_token", "(", "token", ",", "request", ")", "log", ".", "debug", "(", "'Issuing token %r to client id %r (%r) and username %s.'", ",", "token", ",", "request", ".", "client_id", ",", "request", ".", "client", ",", "request", ".", "username", ")", "return", "headers", ",", "json", ".", "dumps", "(", "token", ")", ",", "200" ]
Returns Category object by its alias .
def get_category_by_alias ( self , alias ) : self . _cache_init ( ) return self . _cache_get_entry ( self . CACHE_NAME_ALIASES , alias , None )
5,970
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/utils.py#L160-L168
[ "def", "_from_dict", "(", "cls", ",", "_dict", ")", ":", "args", "=", "{", "}", "if", "'request'", "in", "_dict", ":", "args", "[", "'request'", "]", "=", "MessageRequest", ".", "_from_dict", "(", "_dict", ".", "get", "(", "'request'", ")", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'request\\' not present in Log JSON'", ")", "if", "'response'", "in", "_dict", ":", "args", "[", "'response'", "]", "=", "MessageResponse", ".", "_from_dict", "(", "_dict", ".", "get", "(", "'response'", ")", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'response\\' not present in Log JSON'", ")", "if", "'log_id'", "in", "_dict", ":", "args", "[", "'log_id'", "]", "=", "_dict", ".", "get", "(", "'log_id'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'log_id\\' not present in Log JSON'", ")", "if", "'request_timestamp'", "in", "_dict", ":", "args", "[", "'request_timestamp'", "]", "=", "_dict", ".", "get", "(", "'request_timestamp'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'request_timestamp\\' not present in Log JSON'", ")", "if", "'response_timestamp'", "in", "_dict", ":", "args", "[", "'response_timestamp'", "]", "=", "_dict", ".", "get", "(", "'response_timestamp'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'response_timestamp\\' not present in Log JSON'", ")", "if", "'workspace_id'", "in", "_dict", ":", "args", "[", "'workspace_id'", "]", "=", "_dict", ".", "get", "(", "'workspace_id'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'workspace_id\\' not present in Log JSON'", ")", "if", "'language'", "in", "_dict", ":", "args", "[", "'language'", "]", "=", "_dict", ".", "get", "(", "'language'", ")", "else", ":", "raise", "ValueError", "(", "'Required property \\'language\\' not present in Log JSON'", ")", "return", "cls", "(", "*", "*", "args", ")" ]
Returns Category object by its id .
def get_category_by_id ( self , cid ) : self . _cache_init ( ) return self . _cache_get_entry ( self . CACHE_NAME_IDS , cid )
5,971
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/utils.py#L170-L178
[ "def", "insertSaneDefaults", "(", "self", ")", ":", "self", ".", "raw", ".", "insert", "(", "0", ",", "'-A OUTPUT -o lo -j NOTRACK'", ")", "self", ".", "raw", ".", "insert", "(", "1", ",", "'-A PREROUTING -i lo -j NOTRACK'", ")", "self", ".", "filters", ".", "insert", "(", "0", ",", "'-A INPUT -i lo -j ACCEPT'", ")", "self", ".", "filters", ".", "insert", "(", "1", ",", "'-A OUTPUT -o lo -j ACCEPT'", ")", "self", ".", "filters", ".", "insert", "(", "2", ",", "'-A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT'", ")", "self", ".", "filters", ".", "insert", "(", "3", ",", "'-A OUTPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT'", ")", "return", "self" ]
Returns a dict with categories popularity stats .
def get_ties_stats ( self , categories , target_model = None ) : filter_kwargs = { 'category_id__in' : categories } if target_model is not None : is_cls = hasattr ( target_model , '__name__' ) if is_cls : concrete = False else : concrete = True filter_kwargs [ 'object_id' ] = target_model . id filter_kwargs [ 'content_type' ] = ContentType . objects . get_for_model ( target_model , for_concrete_model = concrete ) return { item [ 'category_id' ] : item [ 'ties_num' ] for item in get_tie_model ( ) . objects . filter ( * * filter_kwargs ) . values ( 'category_id' ) . annotate ( ties_num = Count ( 'category' ) ) }
5,972
https://github.com/idlesign/django-sitecats/blob/9b45e91fc0dcb63a0011780437fe28145e3ecce9/sitecats/utils.py#L197-L221
[ "def", "receive_request", "(", "self", ",", "transaction", ")", ":", "with", "transaction", ":", "transaction", ".", "separate_timer", "=", "self", ".", "_start_separate_timer", "(", "transaction", ")", "self", ".", "_blockLayer", ".", "receive_request", "(", "transaction", ")", "if", "transaction", ".", "block_transfer", ":", "self", ".", "_stop_separate_timer", "(", "transaction", ".", "separate_timer", ")", "self", ".", "_messageLayer", ".", "send_response", "(", "transaction", ")", "self", ".", "send_datagram", "(", "transaction", ".", "response", ")", "return", "self", ".", "_observeLayer", ".", "receive_request", "(", "transaction", ")", "self", ".", "_requestLayer", ".", "receive_request", "(", "transaction", ")", "if", "transaction", ".", "resource", "is", "not", "None", "and", "transaction", ".", "resource", ".", "changed", ":", "self", ".", "notify", "(", "transaction", ".", "resource", ")", "transaction", ".", "resource", ".", "changed", "=", "False", "elif", "transaction", ".", "resource", "is", "not", "None", "and", "transaction", ".", "resource", ".", "deleted", ":", "self", ".", "notify", "(", "transaction", ".", "resource", ")", "transaction", ".", "resource", ".", "deleted", "=", "False", "self", ".", "_observeLayer", ".", "send_response", "(", "transaction", ")", "self", ".", "_blockLayer", ".", "send_response", "(", "transaction", ")", "self", ".", "_stop_separate_timer", "(", "transaction", ".", "separate_timer", ")", "self", ".", "_messageLayer", ".", "send_response", "(", "transaction", ")", "if", "transaction", ".", "response", "is", "not", "None", ":", "if", "transaction", ".", "response", ".", "type", "==", "defines", ".", "Types", "[", "\"CON\"", "]", ":", "self", ".", "_start_retransmission", "(", "transaction", ",", "transaction", ".", "response", ")", "self", ".", "send_datagram", "(", "transaction", ".", "response", ")" ]
Load PeerAssets P2TH privkey into the local node .
def load_p2th_privkey_into_local_node ( provider : RpcNode , prod : bool = True ) -> None : assert isinstance ( provider , RpcNode ) , { "error" : "Import only works with local node." } error = { "error" : "Loading P2TH privkey failed." } pa_params = param_query ( provider . network ) if prod : provider . importprivkey ( pa_params . P2TH_wif , "PAPROD" ) # now verify if ismine == True if not provider . validateaddress ( pa_params . P2TH_addr ) [ 'ismine' ] : raise P2THImportFailed ( error ) else : provider . importprivkey ( pa_params . test_P2TH_wif , "PATEST" ) if not provider . validateaddress ( pa_params . test_P2TH_addr ) [ 'ismine' ] : raise P2THImportFailed ( error )
5,973
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/pautils.py#L30-L45
[ "def", "_match_directories", "(", "self", ",", "entries", ",", "root", ",", "regex_string", ")", ":", "self", ".", "log", "(", "u\"Matching directory names in paged hierarchy\"", ")", "self", ".", "log", "(", "[", "u\"Matching within '%s'\"", ",", "root", "]", ")", "self", ".", "log", "(", "[", "u\"Matching regex '%s'\"", ",", "regex_string", "]", ")", "regex", "=", "re", ".", "compile", "(", "r\"\"", "+", "regex_string", ")", "directories", "=", "set", "(", ")", "root_len", "=", "len", "(", "root", ")", "for", "entry", "in", "entries", ":", "# look only inside root dir", "if", "entry", ".", "startswith", "(", "root", ")", ":", "self", ".", "log", "(", "[", "u\"Examining '%s'\"", ",", "entry", "]", ")", "# remove common prefix root/", "entry", "=", "entry", "[", "root_len", "+", "1", ":", "]", "# split path", "entry_splitted", "=", "entry", ".", "split", "(", "os", ".", "sep", ")", "# match regex", "if", "(", "(", "len", "(", "entry_splitted", ")", ">=", "2", ")", "and", "(", "re", ".", "match", "(", "regex", ",", "entry_splitted", "[", "0", "]", ")", "is", "not", "None", ")", ")", ":", "directories", ".", "add", "(", "entry_splitted", "[", "0", "]", ")", "self", ".", "log", "(", "[", "u\"Match: '%s'\"", ",", "entry_splitted", "[", "0", "]", "]", ")", "else", ":", "self", ".", "log", "(", "[", "u\"No match: '%s'\"", ",", "entry", "]", ")", "return", "sorted", "(", "directories", ")" ]
find deck spawn transactions via Provider it requires that Deck spawn P2TH were imported in local node or that remote API knows about P2TH address .
def find_deck_spawns ( provider : Provider , prod : bool = True ) -> Iterable [ str ] : pa_params = param_query ( provider . network ) if isinstance ( provider , RpcNode ) : if prod : decks = ( i [ "txid" ] for i in provider . listtransactions ( "PAPROD" ) ) else : decks = ( i [ "txid" ] for i in provider . listtransactions ( "PATEST" ) ) if isinstance ( provider , Cryptoid ) or isinstance ( provider , Explorer ) : if prod : decks = ( i for i in provider . listtransactions ( pa_params . P2TH_addr ) ) else : decks = ( i for i in provider . listtransactions ( pa_params . test_P2TH_addr ) ) return decks
5,974
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/pautils.py#L57-L78
[ "def", "versor", "(", "v", ")", ":", "if", "isinstance", "(", "v", "[", "0", "]", ",", "np", ".", "ndarray", ")", ":", "return", "np", ".", "divide", "(", "v", ",", "mag", "(", "v", ")", "[", ":", ",", "None", "]", ")", "else", ":", "return", "v", "/", "mag", "(", "v", ")" ]
deck parser function
def deck_parser ( args : Tuple [ Provider , dict , int , str ] , prod : bool = True ) -> Optional [ Deck ] : provider = args [ 0 ] raw_tx = args [ 1 ] deck_version = args [ 2 ] p2th = args [ 3 ] try : validate_deckspawn_p2th ( provider , raw_tx , p2th ) d = parse_deckspawn_metainfo ( read_tx_opreturn ( raw_tx [ 'vout' ] [ 1 ] ) , deck_version ) if d : d [ "id" ] = raw_tx [ "txid" ] try : d [ "issue_time" ] = raw_tx [ "blocktime" ] except KeyError : d [ "time" ] = 0 d [ "issuer" ] = find_tx_sender ( provider , raw_tx ) d [ "network" ] = provider . network d [ "production" ] = prod d [ "tx_confirmations" ] = raw_tx [ "confirmations" ] return Deck ( * * d ) except ( InvalidDeckSpawn , InvalidDeckMetainfo , InvalidDeckVersion , InvalidNulldataOutput ) as err : pass return None
5,975
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/pautils.py#L81-L113
[ "def", "rate_limit", "(", "f", ")", ":", "def", "new_f", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "errors", "=", "0", "while", "True", ":", "resp", "=", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "resp", ".", "status_code", "==", "200", ":", "errors", "=", "0", "return", "resp", "elif", "resp", ".", "status_code", "==", "401", ":", "# Hack to retain the original exception, but augment it with", "# additional context for the user to interpret it. In a Python", "# 3 only future we can raise a new exception of the same type", "# with a new message from the old error.", "try", ":", "resp", ".", "raise_for_status", "(", ")", "except", "requests", ".", "HTTPError", "as", "e", ":", "message", "=", "\"\\nThis is a protected or locked account, or\"", "+", "\" the credentials provided are no longer valid.\"", "e", ".", "args", "=", "(", "e", ".", "args", "[", "0", "]", "+", "message", ",", ")", "+", "e", ".", "args", "[", "1", ":", "]", "log", ".", "warning", "(", "\"401 Authentication required for %s\"", ",", "resp", ".", "url", ")", "raise", "elif", "resp", ".", "status_code", "==", "429", ":", "reset", "=", "int", "(", "resp", ".", "headers", "[", "'x-rate-limit-reset'", "]", ")", "now", "=", "time", ".", "time", "(", ")", "seconds", "=", "reset", "-", "now", "+", "10", "if", "seconds", "<", "1", ":", "seconds", "=", "10", "log", ".", "warning", "(", "\"rate limit exceeded: sleeping %s secs\"", ",", "seconds", ")", "time", ".", "sleep", "(", "seconds", ")", "elif", "resp", ".", "status_code", ">=", "500", ":", "errors", "+=", "1", "if", "errors", ">", "30", ":", "log", ".", "warning", "(", "\"too many errors from Twitter, giving up\"", ")", "resp", ".", "raise_for_status", "(", ")", "seconds", "=", "60", "*", "errors", "log", ".", "warning", "(", "\"%s from Twitter API, sleeping %s\"", ",", "resp", ".", "status_code", ",", "seconds", ")", "time", ".", "sleep", "(", "seconds", ")", "else", ":", "resp", ".", "raise_for_status", "(", ")", 
"return", "new_f" ]
find index of this tx in the blockid
def tx_serialization_order ( provider : Provider , blockhash : str , txid : str ) -> int : return provider . getblock ( blockhash ) [ "tx" ] . index ( txid )
5,976
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/pautils.py#L116-L119
[ "def", "get_listing", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'listing'", ")", ":", "allEvents", "=", "self", ".", "get_allEvents", "(", ")", "openEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", "closedEvents", "=", "allEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", "publicEvents", "=", "allEvents", ".", "instance_of", "(", "PublicEvent", ")", "allSeries", "=", "allEvents", ".", "instance_of", "(", "Series", ")", "self", ".", "listing", "=", "{", "'allEvents'", ":", "allEvents", ",", "'openEvents'", ":", "openEvents", ",", "'closedEvents'", ":", "closedEvents", ",", "'publicEvents'", ":", "publicEvents", ",", "'allSeries'", ":", "allSeries", ",", "'regOpenEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedEvents'", ":", "publicEvents", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "publicevent__category__isnull", "=", "True", ")", "|", "Q", "(", "publicevent__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'categorySeparateEvents'", ":", "publicEvents", ".", "filter", "(", "publicevent__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'publicevent__category'", ")", ",", "'regOpenSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "True", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", "False", ")", ")", ",", "'regClosedSeries'", ":", "allSeries", ".", "filter", "(", "registrationOpen", "=", "False", ")", ".", "filter", "(", "Q", "(", "series__category__isnull", "=", "True", ")", "|", "Q", "(", "series__category__separateOnRegistrationPage", "=", 
"False", ")", ")", ",", "'categorySeparateSeries'", ":", "allSeries", ".", "filter", "(", "series__category__separateOnRegistrationPage", "=", "True", ")", ".", "order_by", "(", "'series__category'", ")", ",", "}", "return", "self", ".", "listing" ]
interpret issue mode bitfeg
def deck_issue_mode ( proto : DeckSpawnProto ) -> Iterable [ str ] : if proto . issue_mode == 0 : yield "NONE" return for mode , value in proto . MODE . items ( ) : if value > proto . issue_mode : continue if value & proto . issue_mode : yield mode
5,977
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/pautils.py#L141-L152
[ "def", "dump", "(", "self", ")", ":", "assert", "self", ".", "database", "is", "not", "None", "cmd", "=", "\"SELECT count from {} WHERE rowid={}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "STATE_INFO_ROW", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "self", ".", "_from_sqlite", "(", "ret", "[", "0", "]", "[", "0", "]", ")", "+", "self", ".", "inserts", "if", "count", ">", "self", ".", "row_limit", ":", "msg", "=", "\"cleaning up state, this might take a while.\"", "logger", ".", "warning", "(", "msg", ")", "delete", "=", "count", "-", "self", ".", "row_limit", "delete", "+=", "int", "(", "self", ".", "row_limit", "*", "(", "self", ".", "row_cleanup_quota", "/", "100.0", ")", ")", "cmd", "=", "(", "\"DELETE FROM {} WHERE timestamp IN (\"", "\"SELECT timestamp FROM {} ORDER BY timestamp ASC LIMIT {});\"", ")", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ",", "self", ".", "STATE_TABLE", ",", "delete", ")", ")", "self", ".", "_vacuum", "(", ")", "cmd", "=", "\"SELECT COUNT(*) FROM {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "ret", "[", "0", "]", "[", "0", "]", "cmd", "=", "\"UPDATE {} SET count = {} WHERE rowid = {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "_to_sqlite", "(", "count", ")", ",", "self", ".", "STATE_INFO_ROW", ",", ")", ")", "self", ".", "_update_cache_directory_state", "(", ")", "self", ".", "database", ".", "commit", "(", ")", "self", ".", "cursor", ".", "close", "(", ")", "self", ".", "database", ".", "close", "(", ")", 
"self", ".", "database", "=", "None", "self", ".", "cursor", "=", "None", "self", ".", "inserts", "=", "0" ]
Decode deck_spawn tx op_return protobuf message and validate it Raise error if deck_spawn metainfo incomplete or version mistmatch .
def parse_deckspawn_metainfo ( protobuf : bytes , version : int ) -> dict : deck = DeckSpawnProto ( ) deck . ParseFromString ( protobuf ) error = { "error" : "Deck ({deck}) metainfo incomplete, deck must have a name." . format ( deck = deck . name ) } if deck . name == "" : raise InvalidDeckMetainfo ( error ) if deck . version != version : raise InvalidDeckVersion ( { "error" , "Deck version mismatch." } ) return { "version" : deck . version , "name" : deck . name , "issue_mode" : deck . issue_mode , "number_of_decimals" : deck . number_of_decimals , "asset_specific_data" : deck . asset_specific_data }
5,978
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/pautils.py#L172-L193
[ "def", "median", "(", "data", ")", ":", "if", "len", "(", "data", ")", "==", "0", ":", "return", "None", "data", "=", "sorted", "(", "data", ")", "return", "float", "(", "(", "data", "[", "len", "(", "data", ")", "//", "2", "]", "+", "data", "[", "(", "len", "(", "data", ")", "-", "1", ")", "//", "2", "]", ")", "/", "2.", ")" ]
load deck p2th into local node via importprivke this allows building of proof - of - timeline for this deck
def load_deck_p2th_into_local_node ( provider : RpcNode , deck : Deck ) -> None : assert isinstance ( provider , RpcNode ) , { "error" : "You can load privkeys only into local node." } error = { "error" : "Deck P2TH import went wrong." } provider . importprivkey ( deck . p2th_wif , deck . id ) check_addr = provider . validateaddress ( deck . p2th_address ) if not check_addr [ "isvalid" ] and not check_addr [ "ismine" ] : raise DeckP2THImportError ( error )
5,979
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/pautils.py#L213-L226
[ "def", "versor", "(", "v", ")", ":", "if", "isinstance", "(", "v", "[", "0", "]", ",", "np", ".", "ndarray", ")", ":", "return", "np", ".", "divide", "(", "v", ",", "mag", "(", "v", ")", "[", ":", ",", "None", "]", ")", "else", ":", "return", "v", "/", "mag", "(", "v", ")" ]
this function wraps all the card transfer parsing
def card_bundle_parser ( bundle : CardBundle , debug = False ) -> Iterator : try : # first vout of the bundle must pay to deck.p2th validate_card_transfer_p2th ( bundle . deck , bundle . vouts [ 0 ] ) # second vout must be OP_RETURN with card_metainfo card_metainfo = parse_card_transfer_metainfo ( read_tx_opreturn ( bundle . vouts [ 1 ] ) , bundle . deck . version ) # if any of this exceptions is raised, return None except ( InvalidCardTransferP2TH , CardVersionMismatch , CardNumberOfDecimalsMismatch , RecieverAmountMismatch , DecodeError , TypeError , InvalidNulldataOutput ) as e : if debug : print ( e ) # re-do as logging later on return yield # check for decimals if not card_metainfo [ "number_of_decimals" ] == bundle . deck . number_of_decimals : raise CardNumberOfDecimalsMismatch ( { "error" : "Number of decimals does not match." } ) # deduce the individual cards in the bundle cards = card_postprocess ( card_metainfo , bundle . vouts ) # drop the vouts property del bundle . __dict__ [ 'vouts' ] for c in cards : d = { * * c , * * bundle . __dict__ } try : yield CardTransfer ( * * d ) # issuing cards to issuing address is forbidden, # this will except the error except InvalidCardIssue as e : if debug : print ( e )
5,980
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/pautils.py#L283-L333
[ "def", "timestamp", "(", "instance", ")", ":", "ts_re", "=", "re", ".", "compile", "(", "r\"^[0-9]{4}-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\\.[0-9]+)?Z$\"", ")", "timestamp_props", "=", "[", "'created'", ",", "'modified'", "]", "if", "instance", "[", "'type'", "]", "in", "enums", ".", "TIMESTAMP_PROPERTIES", ":", "timestamp_props", "+=", "enums", ".", "TIMESTAMP_PROPERTIES", "[", "instance", "[", "'type'", "]", "]", "for", "tprop", "in", "timestamp_props", ":", "if", "tprop", "in", "instance", "and", "ts_re", ".", "match", "(", "instance", "[", "tprop", "]", ")", ":", "# Don't raise an error if schemas will catch it", "try", ":", "parser", ".", "parse", "(", "instance", "[", "tprop", "]", ")", "except", "ValueError", "as", "e", ":", "yield", "JSONError", "(", "\"'%s': '%s' is not a valid timestamp: %s\"", "%", "(", "tprop", ",", "instance", "[", "tprop", "]", ",", "str", "(", "e", ")", ")", ",", "instance", "[", "'id'", "]", ")", "if", "has_cyber_observable_data", "(", "instance", ")", ":", "for", "key", ",", "obj", "in", "instance", "[", "'objects'", "]", ".", "items", "(", ")", ":", "if", "'type'", "not", "in", "obj", ":", "continue", "if", "obj", "[", "'type'", "]", "in", "enums", ".", "TIMESTAMP_OBSERVABLE_PROPERTIES", ":", "for", "tprop", "in", "enums", ".", "TIMESTAMP_OBSERVABLE_PROPERTIES", "[", "obj", "[", "'type'", "]", "]", ":", "if", "tprop", "in", "obj", "and", "ts_re", ".", "match", "(", "obj", "[", "tprop", "]", ")", ":", "# Don't raise an error if schemas will catch it", "try", ":", "parser", ".", "parse", "(", "obj", "[", "tprop", "]", ")", "except", "ValueError", "as", "e", ":", "yield", "JSONError", "(", "\"'%s': '%s': '%s' is not a valid timestamp: %s\"", "%", "(", "obj", "[", "'type'", "]", ",", "tprop", ",", "obj", "[", "tprop", "]", ",", "str", "(", "e", ")", ")", ",", "instance", "[", "'id'", "]", ")", "if", "obj", "[", "'type'", "]", "in", "enums", ".", "TIMESTAMP_EMBEDDED_PROPERTIES", 
":", "for", "embed", "in", "enums", ".", "TIMESTAMP_EMBEDDED_PROPERTIES", "[", "obj", "[", "'type'", "]", "]", ":", "if", "embed", "in", "obj", ":", "for", "tprop", "in", "enums", ".", "TIMESTAMP_EMBEDDED_PROPERTIES", "[", "obj", "[", "'type'", "]", "]", "[", "embed", "]", ":", "if", "embed", "==", "'extensions'", ":", "for", "ext", "in", "obj", "[", "embed", "]", ":", "if", "tprop", "in", "obj", "[", "embed", "]", "[", "ext", "]", "and", "ts_re", ".", "match", "(", "obj", "[", "embed", "]", "[", "ext", "]", "[", "tprop", "]", ")", ":", "try", ":", "parser", ".", "parse", "(", "obj", "[", "embed", "]", "[", "ext", "]", "[", "tprop", "]", ")", "except", "ValueError", "as", "e", ":", "yield", "JSONError", "(", "\"'%s': '%s': '%s': '%s' is not a valid timestamp: %s\"", "%", "(", "obj", "[", "'type'", "]", ",", "ext", ",", "tprop", ",", "obj", "[", "embed", "]", "[", "ext", "]", "[", "tprop", "]", ",", "str", "(", "e", ")", ")", ",", "instance", "[", "'id'", "]", ")", "elif", "tprop", "in", "obj", "[", "embed", "]", "and", "ts_re", ".", "match", "(", "obj", "[", "embed", "]", "[", "tprop", "]", ")", ":", "try", ":", "parser", ".", "parse", "(", "obj", "[", "embed", "]", "[", "tprop", "]", ")", "except", "ValueError", "as", "e", ":", "yield", "JSONError", "(", "\"'%s': '%s': '%s' is not a valid timestamp: %s\"", "%", "(", "obj", "[", "'type'", "]", ",", "tprop", ",", "obj", "[", "embed", "]", "[", "tprop", "]", ",", "str", "(", "e", ")", ")", ",", "instance", "[", "'id'", "]", ")" ]
Find the PAParams for a network by its long or short name . Raises UnsupportedNetwork if no PAParams is found .
def param_query ( name : str ) -> PAParams : for pa_params in params : if name in ( pa_params . network_name , pa_params . network_shortname , ) : return pa_params raise UnsupportedNetwork
5,981
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/pa_constants.py#L33-L42
[ "def", "WriteBlobs", "(", "self", ",", "blob_id_data_map", ",", "cursor", "=", "None", ")", ":", "chunks", "=", "[", "]", "for", "blob_id", ",", "blob", "in", "iteritems", "(", "blob_id_data_map", ")", ":", "chunks", ".", "extend", "(", "_BlobToChunks", "(", "blob_id", ".", "AsBytes", "(", ")", ",", "blob", ")", ")", "for", "values", "in", "_PartitionChunks", "(", "chunks", ")", ":", "_Insert", "(", "cursor", ",", "\"blobs\"", ",", "values", ")" ]
opens file dialog to load scripts into gui
def load_scripts ( self ) : # update scripts so that current settings do not get lost for index in range ( self . tree_scripts . topLevelItemCount ( ) ) : script_item = self . tree_scripts . topLevelItem ( index ) self . update_script_from_item ( script_item ) dialog = LoadDialog ( elements_type = "scripts" , elements_old = self . scripts , filename = self . gui_settings [ 'scripts_folder' ] ) if dialog . exec_ ( ) : self . gui_settings [ 'scripts_folder' ] = str ( dialog . txt_probe_log_path . text ( ) ) scripts = dialog . get_values ( ) added_scripts = set ( scripts . keys ( ) ) - set ( self . scripts . keys ( ) ) removed_scripts = set ( self . scripts . keys ( ) ) - set ( scripts . keys ( ) ) if 'data_folder' in list ( self . gui_settings . keys ( ) ) and os . path . exists ( self . gui_settings [ 'data_folder' ] ) : data_folder_name = self . gui_settings [ 'data_folder' ] else : data_folder_name = None # create instances of new instruments/scripts self . scripts , loaded_failed , self . instruments = Script . load_and_append ( script_dict = { name : scripts [ name ] for name in added_scripts } , scripts = self . scripts , instruments = self . instruments , log_function = self . log , data_path = data_folder_name ) # delete instances of new instruments/scripts that have been deselected for name in removed_scripts : del self . scripts [ name ]
5,982
https://github.com/LISE-B26/pylabcontrol/blob/67482e5157fcd1c40705e5c2cacfb93564703ed0/build/lib/pylabcontrol/src/gui/qt_b26_gui.py#L523-L558
[ "def", "from_text", "(", "text", ")", ":", "value", "=", "_by_text", ".", "get", "(", "text", ".", "upper", "(", ")", ")", "if", "value", "is", "None", ":", "match", "=", "_unknown_class_pattern", ".", "match", "(", "text", ")", "if", "match", "==", "None", ":", "raise", "UnknownRdataclass", "value", "=", "int", "(", "match", ".", "group", "(", "1", ")", ")", "if", "value", "<", "0", "or", "value", ">", "65535", ":", "raise", "ValueError", "(", "\"class must be between >= 0 and <= 65535\"", ")", "return", "value" ]
Returns the hash of the block at ; index 0 is the genesis block .
def getblockhash ( self , index : int ) -> str : return cast ( str , self . api_fetch ( 'getblockhash?index=' + str ( index ) ) )
5,983
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/provider/explorer.py#L73-L76
[ "def", "getMirrorTextureD3D11", "(", "self", ",", "eEye", ",", "pD3D11DeviceOrResource", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getMirrorTextureD3D11", "ppD3D11ShaderResourceView", "=", "c_void_p", "(", ")", "result", "=", "fn", "(", "eEye", ",", "pD3D11DeviceOrResource", ",", "byref", "(", "ppD3D11ShaderResourceView", ")", ")", "return", "result", ",", "ppD3D11ShaderResourceView", ".", "value" ]
Returns information about the block with the given hash .
def getblock ( self , hash : str ) -> dict : return cast ( dict , self . api_fetch ( 'getblock?hash=' + hash ) )
5,984
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/provider/explorer.py#L78-L81
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Returns information for given address .
def getaddress ( self , address : str ) -> dict : return cast ( dict , self . ext_fetch ( 'getaddress/' + address ) )
5,985
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/provider/explorer.py#L106-L109
[ "def", "restore_defaults_ratio", "(", "self", ")", ":", "# Set the flag to true because user ask to.", "self", ".", "is_restore_default", "=", "True", "# remove current default ratio", "for", "i", "in", "reversed", "(", "list", "(", "range", "(", "self", ".", "container_layout", ".", "count", "(", ")", ")", ")", ")", ":", "widget", "=", "self", ".", "container_layout", ".", "itemAt", "(", "i", ")", ".", "widget", "(", ")", "if", "widget", "is", "not", "None", ":", "widget", ".", "setParent", "(", "None", ")", "# reload default ratio", "self", ".", "restore_default_values_page", "(", ")" ]
Returns unspent transactions for given address .
def listunspent ( self , address : str ) -> list : try : return cast ( dict , self . ext_fetch ( 'listunspent/' + address ) ) [ 'unspent_outputs' ] except KeyError : raise InsufficientFunds ( 'Insufficient funds.' )
5,986
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/provider/explorer.py#L111-L117
[ "def", "_build_http_client", "(", "cls", ",", "session", ":", "AppSession", ")", ":", "# TODO:", "# recorder = self._build_recorder()", "stream_factory", "=", "functools", ".", "partial", "(", "HTTPStream", ",", "ignore_length", "=", "session", ".", "args", ".", "ignore_length", ",", "keep_alive", "=", "session", ".", "args", ".", "http_keep_alive", ")", "return", "session", ".", "factory", ".", "new", "(", "'HTTPClient'", ",", "connection_pool", "=", "session", ".", "factory", "[", "'ConnectionPool'", "]", ",", "stream_factory", "=", "stream_factory", ")" ]
Returns information about given transaction .
def txinfo ( self , txid : str ) -> dict : return cast ( dict , self . ext_fetch ( 'txinfo/' + txid ) )
5,987
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/provider/explorer.py#L141-L144
[ "def", "get_settings", "(", ")", ":", "s", "=", "getattr", "(", "settings", ",", "'CLAMAV_UPLOAD'", ",", "{", "}", ")", "s", "=", "{", "'CONTENT_TYPE_CHECK_ENABLED'", ":", "s", ".", "get", "(", "'CONTENT_TYPE_CHECK_ENABLED'", ",", "False", ")", ",", "# LAST_HANDLER is not a user configurable option; we return", "# it with the settings dict simply because it's convenient.", "'LAST_HANDLER'", ":", "getattr", "(", "settings", ",", "'FILE_UPLOAD_HANDLERS'", ")", "[", "-", "1", "]", "}", "return", "s" ]
Returns current balance of given address .
def getbalance ( self , address : str ) -> Decimal : try : return Decimal ( cast ( float , self . ext_fetch ( 'getbalance/' + address ) ) ) except TypeError : return Decimal ( 0 )
5,988
https://github.com/PeerAssets/pypeerassets/blob/8927b4a686887f44fe2cd9de777e2c827c948987/pypeerassets/provider/explorer.py#L146-L152
[ "def", "_build_http_client", "(", "cls", ",", "session", ":", "AppSession", ")", ":", "# TODO:", "# recorder = self._build_recorder()", "stream_factory", "=", "functools", ".", "partial", "(", "HTTPStream", ",", "ignore_length", "=", "session", ".", "args", ".", "ignore_length", ",", "keep_alive", "=", "session", ".", "args", ".", "http_keep_alive", ")", "return", "session", ".", "factory", ".", "new", "(", "'HTTPClient'", ",", "connection_pool", "=", "session", ".", "factory", "[", "'ConnectionPool'", "]", ",", "stream_factory", "=", "stream_factory", ")" ]
Extract subelement from obj according to pointer . It assums that document is the object .
def extract ( self , obj , bypass_ref = False ) : return self . pointer . extract ( obj , bypass_ref )
5,989
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/pointer/bases.py#L40-L48
[ "async", "def", "add_unknown_id", "(", "self", ",", "unknown_id", ",", "timeout", "=", "OTGW_DEFAULT_TIMEOUT", ")", ":", "cmd", "=", "OTGW_CMD_UNKNOWN_ID", "unknown_id", "=", "int", "(", "unknown_id", ")", "if", "unknown_id", "<", "1", "or", "unknown_id", ">", "255", ":", "return", "None", "ret", "=", "await", "self", ".", "_wait_for_cmd", "(", "cmd", ",", "unknown_id", ",", "timeout", ")", "if", "ret", "is", "not", "None", ":", "return", "int", "(", "ret", ")" ]
parse pointer into tokens
def parse ( self , pointer ) : if isinstance ( pointer , Pointer ) : return pointer . tokens [ : ] elif pointer == '' : return [ ] tokens = [ ] staged , _ , children = pointer . partition ( '/' ) if staged : try : token = StagesToken ( staged ) token . last = False tokens . append ( token ) except ValueError : raise ParseError ( 'pointer must start with / or int' , pointer ) if _ : for part in children . split ( '/' ) : part = part . replace ( '~1' , '/' ) part = part . replace ( '~0' , '~' ) token = ChildToken ( part ) token . last = False tokens . append ( token ) return tokens
5,990
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/pointer/bases.py#L96-L121
[ "def", "BuildChecks", "(", "self", ",", "request", ")", ":", "result", "=", "[", "]", "if", "request", ".", "HasField", "(", "\"start_time\"", ")", "or", "request", ".", "HasField", "(", "\"end_time\"", ")", ":", "def", "FilterTimestamp", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "HasField", "(", "\"st_mtime\"", ")", "and", "(", "file_stat", ".", "st_mtime", "<", "request", ".", "start_time", "or", "file_stat", ".", "st_mtime", ">", "request", ".", "end_time", ")", "result", ".", "append", "(", "FilterTimestamp", ")", "if", "request", ".", "HasField", "(", "\"min_file_size\"", ")", "or", "request", ".", "HasField", "(", "\"max_file_size\"", ")", ":", "def", "FilterSize", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "HasField", "(", "\"st_size\"", ")", "and", "(", "file_stat", ".", "st_size", "<", "request", ".", "min_file_size", "or", "file_stat", ".", "st_size", ">", "request", ".", "max_file_size", ")", "result", ".", "append", "(", "FilterSize", ")", "if", "request", ".", "HasField", "(", "\"perm_mode\"", ")", ":", "def", "FilterPerms", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "(", "file_stat", ".", "st_mode", "&", "request", ".", "perm_mask", ")", "!=", "request", ".", "perm_mode", "result", ".", "append", "(", "FilterPerms", ")", "if", "request", ".", "HasField", "(", "\"uid\"", ")", ":", "def", "FilterUID", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "st_uid", "!=", "request", ".", "uid", "result", ".", "append", "(", "FilterUID", ")", "if", "request", ".", "HasField", "(", "\"gid\"", ")", ":", "def", "FilterGID", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "st_gid", "!=", "request", ".", "gid", "result", ".", "append", "(", "FilterGID", ")", "if", "request", ".", "HasField", "(", "\"path_regex\"", ")", ":", "regex", "=", "request", ".", "path_regex", 
"def", "FilterPath", "(", "file_stat", ",", "regex", "=", "regex", ")", ":", "\"\"\"Suppress any filename not matching the regular expression.\"\"\"", "return", "not", "regex", ".", "Search", "(", "file_stat", ".", "pathspec", ".", "Basename", "(", ")", ")", "result", ".", "append", "(", "FilterPath", ")", "if", "request", ".", "HasField", "(", "\"data_regex\"", ")", ":", "def", "FilterData", "(", "file_stat", ",", "*", "*", "_", ")", ":", "\"\"\"Suppress files that do not match the content.\"\"\"", "return", "not", "self", ".", "TestFileContent", "(", "file_stat", ")", "result", ".", "append", "(", "FilterData", ")", "return", "result" ]
Extract subelement from obj according to tokens .
def extract ( self , obj , bypass_ref = False ) : for token in self . tokens : obj = token . extract ( obj , bypass_ref ) return obj
5,991
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/pointer/bases.py#L123-L132
[ "def", "create_api_call", "(", "func", ",", "settings", ")", ":", "def", "base_caller", "(", "api_call", ",", "_", ",", "*", "args", ")", ":", "\"\"\"Simply call api_call and ignore settings.\"\"\"", "return", "api_call", "(", "*", "args", ")", "def", "inner", "(", "request", ",", "options", "=", "None", ")", ":", "\"\"\"Invoke with the actual settings.\"\"\"", "this_options", "=", "_merge_options_metadata", "(", "options", ",", "settings", ")", "this_settings", "=", "settings", ".", "merge", "(", "this_options", ")", "if", "this_settings", ".", "retry", "and", "this_settings", ".", "retry", ".", "retry_codes", ":", "api_call", "=", "gax", ".", "retry", ".", "retryable", "(", "func", ",", "this_settings", ".", "retry", ",", "*", "*", "this_settings", ".", "kwargs", ")", "else", ":", "api_call", "=", "gax", ".", "retry", ".", "add_timeout_arg", "(", "func", ",", "this_settings", ".", "timeout", ",", "*", "*", "this_settings", ".", "kwargs", ")", "api_call", "=", "_catch_errors", "(", "api_call", ",", "gax", ".", "config", ".", "API_ERRORS", ")", "return", "api_caller", "(", "api_call", ",", "this_settings", ",", "request", ")", "if", "settings", ".", "page_descriptor", ":", "if", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "raise", "ValueError", "(", "'The API call has incompatible settings: '", "'bundling and page streaming'", ")", "api_caller", "=", "_page_streamable", "(", "settings", ".", "page_descriptor", ")", "elif", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "api_caller", "=", "_bundleable", "(", "settings", ".", "bundle_descriptor", ")", "else", ":", "api_caller", "=", "base_caller", "return", "inner" ]
Extract parent of obj according to current token .
def extract ( self , obj , bypass_ref = False ) : for i in range ( 0 , self . stages ) : try : obj = obj . parent_obj except AttributeError : raise UnstagedError ( obj , '{!r} must be staged before ' 'exploring its parents' . format ( obj ) ) if self . member : return obj . parent_member return obj
5,992
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/pointer/bases.py#L190-L205
[ "def", "delete", "(", "self", ",", "bulk_id", ")", ":", "collection_name", "=", "self", ".", "request", ".", "headers", ".", "get", "(", "\"collection\"", ")", "if", "not", "collection_name", ":", "self", ".", "raise_error", "(", "400", ",", "\"Missing a collection name header\"", ")", "self", ".", "revisions", "=", "BaseAsyncMotorDocument", "(", "\"%s_revisions\"", "%", "collection_name", ")", "self", ".", "logger", ".", "info", "(", "\"Deleting revisions with bulk_id %s\"", "%", "(", "bulk_id", ")", ")", "result", "=", "yield", "self", ".", "revisions", ".", "collection", ".", "remove", "(", "{", "\"meta.bulk_id\"", ":", "bulk_id", "}", ")", "self", ".", "write", "(", "result", ")" ]
Extract subelement from obj according to current token .
def extract ( self , obj , bypass_ref = False ) : try : if isinstance ( obj , Mapping ) : if not bypass_ref and '$ref' in obj : raise RefError ( obj , 'presence of a $ref member' ) obj = self . extract_mapping ( obj ) elif isinstance ( obj , Sequence ) and not isinstance ( obj , string_types ) : obj = self . extract_sequence ( obj ) else : raise WrongType ( obj , '{!r} does not apply ' 'for {!r}' . format ( str ( self ) , obj ) ) if isinstance ( obj , Mapping ) : if not bypass_ref and '$ref' in obj : raise RefError ( obj , 'presence of a $ref member' ) return obj except ExtractError as error : logger . exception ( error ) raise except Exception as error : logger . exception ( error ) args = [ arg for arg in error . args if arg not in ( self , obj ) ] raise ExtractError ( obj , * args )
5,993
https://github.com/johnnoone/json-spec/blob/f91981724cea0c366bd42a6670eb07bbe31c0e0c/src/jsonspec/pointer/bases.py#L212-L240
[ "def", "add_redirect", "(", "self", ",", "name", ",", "proto", ",", "host_ip", ",", "host_port", ",", "guest_ip", ",", "guest_port", ")", ":", "if", "not", "isinstance", "(", "name", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"name can only be an instance of type basestring\"", ")", "if", "not", "isinstance", "(", "proto", ",", "NATProtocol", ")", ":", "raise", "TypeError", "(", "\"proto can only be an instance of type NATProtocol\"", ")", "if", "not", "isinstance", "(", "host_ip", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"host_ip can only be an instance of type basestring\"", ")", "if", "not", "isinstance", "(", "host_port", ",", "baseinteger", ")", ":", "raise", "TypeError", "(", "\"host_port can only be an instance of type baseinteger\"", ")", "if", "not", "isinstance", "(", "guest_ip", ",", "basestring", ")", ":", "raise", "TypeError", "(", "\"guest_ip can only be an instance of type basestring\"", ")", "if", "not", "isinstance", "(", "guest_port", ",", "baseinteger", ")", ":", "raise", "TypeError", "(", "\"guest_port can only be an instance of type baseinteger\"", ")", "self", ".", "_call", "(", "\"addRedirect\"", ",", "in_p", "=", "[", "name", ",", "proto", ",", "host_ip", ",", "host_port", ",", "guest_ip", ",", "guest_port", "]", ")" ]
Create SHA - 1 hash get digest b64 encode split every 60 char .
def digester ( data ) : if not isinstance ( data , six . binary_type ) : data = data . encode ( 'utf_8' ) hashof = hashlib . sha1 ( data ) . digest ( ) encoded_hash = base64 . b64encode ( hashof ) if not isinstance ( encoded_hash , six . string_types ) : encoded_hash = encoded_hash . decode ( 'utf_8' ) chunked = splitter ( encoded_hash , chunksize = 60 ) lines = '\n' . join ( chunked ) return lines
5,994
https://github.com/samstav/requests-chef/blob/a0bf013b925abd0cf76eeaf6300cf32659632773/requests_chef/mixlib_auth.py#L38-L48
[ "def", "_simulate_unitary", "(", "self", ",", "op", ":", "ops", ".", "Operation", ",", "data", ":", "_StateAndBuffer", ",", "indices", ":", "List", "[", "int", "]", ")", "->", "None", ":", "result", "=", "protocols", ".", "apply_unitary", "(", "op", ",", "args", "=", "protocols", ".", "ApplyUnitaryArgs", "(", "data", ".", "state", ",", "data", ".", "buffer", ",", "indices", ")", ")", "if", "result", "is", "data", ".", "buffer", ":", "data", ".", "buffer", "=", "data", ".", "state", "data", ".", "state", "=", "result" ]
Split an iterable that supports indexing into chunks of chunksize .
def splitter ( iterable , chunksize = 60 ) : return ( iterable [ 0 + i : chunksize + i ] for i in range ( 0 , len ( iterable ) , chunksize ) )
5,995
https://github.com/samstav/requests-chef/blob/a0bf013b925abd0cf76eeaf6300cf32659632773/requests_chef/mixlib_auth.py#L60-L63
[ "def", "parse_na", "(", "txt", ":", "str", ")", "->", "(", "MetarData", ",", "Units", ")", ":", "# type: ignore", "units", "=", "Units", "(", "*", "*", "NA_UNITS", ")", "# type: ignore", "clean", "=", "core", ".", "sanitize_report_string", "(", "txt", ")", "wxresp", "=", "{", "'raw'", ":", "txt", ",", "'sanitized'", ":", "clean", "}", "wxdata", ",", "wxresp", "[", "'remarks'", "]", "=", "core", ".", "get_remarks", "(", "clean", ")", "wxdata", ",", "wxresp", "[", "'runway_visibility'", "]", ",", "_", "=", "core", ".", "sanitize_report_list", "(", "wxdata", ")", "wxdata", ",", "wxresp", "[", "'station'", "]", ",", "wxresp", "[", "'time'", "]", "=", "core", ".", "get_station_and_time", "(", "wxdata", ")", "wxdata", ",", "wxresp", "[", "'clouds'", "]", "=", "core", ".", "get_clouds", "(", "wxdata", ")", "wxdata", ",", "wxresp", "[", "'wind_direction'", "]", ",", "wxresp", "[", "'wind_speed'", "]", ",", "wxresp", "[", "'wind_gust'", "]", ",", "wxresp", "[", "'wind_variable_direction'", "]", "=", "core", ".", "get_wind", "(", "wxdata", ",", "units", ")", "wxdata", ",", "wxresp", "[", "'altimeter'", "]", "=", "core", ".", "get_altimeter", "(", "wxdata", ",", "units", ",", "'NA'", ")", "wxdata", ",", "wxresp", "[", "'visibility'", "]", "=", "core", ".", "get_visibility", "(", "wxdata", ",", "units", ")", "wxresp", "[", "'other'", "]", ",", "wxresp", "[", "'temperature'", "]", ",", "wxresp", "[", "'dewpoint'", "]", "=", "core", ".", "get_temp_and_dew", "(", "wxdata", ")", "condition", "=", "core", ".", "get_flight_rules", "(", "wxresp", "[", "'visibility'", "]", ",", "core", ".", "get_ceiling", "(", "wxresp", "[", "'clouds'", "]", ")", ")", "# type: ignore", "wxresp", "[", "'flight_rules'", "]", "=", "FLIGHT_RULES", "[", "condition", "]", "wxresp", "[", "'remarks_info'", "]", "=", "remarks", ".", "parse", "(", "wxresp", "[", "'remarks'", "]", ")", "# type: ignore", "wxresp", "[", "'time'", "]", "=", "core", ".", "make_timestamp", "(", "wxresp", "[", "'time'", "]", ")", "# 
type: ignore", "return", "MetarData", "(", "*", "*", "wxresp", ")", ",", "units" ]
Return the canonical request string .
def canonical_request ( self , method , path , content , timestamp ) : request = collections . OrderedDict ( [ ( 'Method' , method . upper ( ) ) , ( 'Hashed Path' , path ) , ( 'X-Ops-Content-Hash' , content ) , ( 'X-Ops-Timestamp' , timestamp ) , ( 'X-Ops-UserId' , self . user_id ) , ] ) return '\n' . join ( [ '%s:%s' % ( key , value ) for key , value in request . items ( ) ] )
5,996
https://github.com/samstav/requests-chef/blob/a0bf013b925abd0cf76eeaf6300cf32659632773/requests_chef/mixlib_auth.py#L127-L137
[ "def", "_next_sample_index", "(", "self", ")", ":", "# Return the next streamer index where the streamer is not None,", "# wrapping around.", "idx", "=", "self", ".", "active_index_", "self", ".", "active_index_", "+=", "1", "if", "self", ".", "active_index_", ">=", "len", "(", "self", ".", "streams_", ")", ":", "self", ".", "active_index_", "=", "0", "# Continue to increment if this streamer is exhausted (None)", "# This should never be infinite looping;", "# the `_streamers_available` check happens immediately", "# before this, so there should always be at least one not-None", "# streamer.", "while", "self", ".", "streams_", "[", "idx", "]", "is", "None", ":", "idx", "=", "self", ".", "active_index_", "self", ".", "active_index_", "+=", "1", "if", "self", ".", "active_index_", ">=", "len", "(", "self", ".", "streams_", ")", ":", "self", ".", "active_index_", "=", "0", "return", "idx" ]
Return a PrivateKey instance .
def load_pem ( cls , private_key , password = None ) : # TODO(sam): try to break this in tests maybe_path = normpath ( private_key ) if os . path . isfile ( maybe_path ) : with open ( maybe_path , 'rb' ) as pkf : private_key = pkf . read ( ) if not isinstance ( private_key , six . binary_type ) : private_key = private_key . encode ( 'utf-8' ) pkey = serialization . load_pem_private_key ( private_key , password = password , backend = crypto_backends . default_backend ( ) ) return cls ( pkey )
5,997
https://github.com/samstav/requests-chef/blob/a0bf013b925abd0cf76eeaf6300cf32659632773/requests_chef/mixlib_auth.py#L159-L177
[ "def", "command_max_delay", "(", "self", ",", "event", "=", "None", ")", ":", "try", ":", "max_delay", "=", "self", ".", "max_delay_var", ".", "get", "(", ")", "except", "ValueError", ":", "max_delay", "=", "self", ".", "runtime_cfg", ".", "max_delay", "if", "max_delay", "<", "0", ":", "max_delay", "=", "self", ".", "runtime_cfg", ".", "max_delay", "if", "max_delay", ">", "0.1", ":", "max_delay", "=", "self", ".", "runtime_cfg", ".", "max_delay", "self", ".", "runtime_cfg", ".", "max_delay", "=", "max_delay", "self", ".", "max_delay_var", ".", "set", "(", "self", ".", "runtime_cfg", ".", "max_delay", ")" ]
Sign data with the private key and return the signed data .
def sign ( self , data , b64 = True ) : padder = padding . PKCS1v15 ( ) signer = self . private_key . signer ( padder , None ) if not isinstance ( data , six . binary_type ) : data = data . encode ( 'utf_8' ) signer . update ( data ) signed = signer . finalize ( ) if b64 : signed = base64 . b64encode ( signed ) return signed
5,998
https://github.com/samstav/requests-chef/blob/a0bf013b925abd0cf76eeaf6300cf32659632773/requests_chef/mixlib_auth.py#L179-L192
[ "def", "truncate", "(", "self", ",", "timeout", "=", "None", ")", ":", "client", "=", "self", ".", "_instance", ".", "_client", "table_admin_client", "=", "client", ".", "table_admin_client", "if", "timeout", ":", "table_admin_client", ".", "drop_row_range", "(", "self", ".", "name", ",", "delete_all_data_from_table", "=", "True", ",", "timeout", "=", "timeout", ")", "else", ":", "table_admin_client", ".", "drop_row_range", "(", "self", ".", "name", ",", "delete_all_data_from_table", "=", "True", ")" ]
r Dump python object to file .
def dump ( obj , fp , * * kw ) : xml = dumps ( obj , * * kw ) if isinstance ( fp , basestring ) : with open ( fp , 'w' ) as fobj : fobj . write ( xml ) else : fp . write ( xml )
5,999
https://github.com/heronotears/lazyxml/blob/e3f1ebd3f34cfa03d022ddec90e17d60c1c81953/lazyxml/__init__.py#L149-L180
[ "def", "disconnect", "(", "self", ",", "chassis_list", ")", ":", "self", ".", "_check_session", "(", ")", "if", "not", "isinstance", "(", "chassis_list", ",", "(", "list", ",", "tuple", ",", "set", ",", "dict", ",", "frozenset", ")", ")", ":", "chassis_list", "=", "(", "chassis_list", ",", ")", "if", "len", "(", "chassis_list", ")", "==", "1", ":", "self", ".", "_rest", ".", "delete_request", "(", "'connections'", ",", "chassis_list", "[", "0", "]", ")", "else", ":", "params", "=", "{", "chassis", ":", "True", "for", "chassis", "in", "chassis_list", "}", "params", "[", "'action'", "]", "=", "'disconnect'", "self", ".", "_rest", ".", "post_request", "(", "'connections'", ",", "None", ",", "params", ")" ]