query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Display or add a gene list .
def gene_list(list_id=None):
    """Display or add a gene list.

    GET with ``list_id`` shows one gene list (``?download`` streams it as a
    text attachment); POST with ``list_id`` links cases to the list; POST
    without ``list_id`` uploads a new gene list file.
    """
    all_case_ids = [case.case_id for case in app.db.cases()]
    # defaults so a GET without list_id cannot hit unbound locals at render
    # time (the original raised UnboundLocalError for that request)
    genelist_obj = None
    case_ids = all_case_ids
    if list_id:
        genelist_obj = app.db.gene_list(list_id)
        # BUGFIX: check for a missing list *before* dereferencing it; the
        # original accessed genelist_obj.cases first and crashed with
        # AttributeError instead of returning the intended 404
        if genelist_obj is None:
            return abort(404, "gene list not found: {}".format(list_id))
        case_ids = [case.case_id for case in app.db.cases()
                    if case not in genelist_obj.cases]
    if 'download' in request.args and genelist_obj is not None:
        # stream the gene ids as a downloadable text attachment
        response = make_response('\n'.join(genelist_obj.gene_ids))
        filename = secure_filename("{}.txt".format(genelist_obj.list_id))
        header = "attachment; filename={}".format(filename)
        response.headers['Content-Disposition'] = header
        return response
    if request.method == 'POST':
        if list_id:
            # link each submitted case to the existing gene list
            case_ids = request.form.getlist('case_id')
            for case_id in case_ids:
                case_obj = app.db.case(case_id)
                if case_obj not in genelist_obj.cases:
                    genelist_obj.cases.append(case_obj)
            app.db.save()
        else:
            # upload a new gene list; validate the file before using
            # req_file.filename as a fallback list name
            req_file = request.files['file']
            if not req_file:
                return abort(500, 'Please provide a file for upload')
            new_listid = (request.form['list_id'] or
                          secure_filename(req_file.filename))
            if app.db.gene_list(new_listid):
                return abort(500, 'Please provide a unique list name')
            # skip comment lines ('#') when reading gene ids
            gene_ids = [line for line in req_file.stream
                        if not line.startswith('#')]
            genelist_obj = app.db.add_genelist(new_listid, gene_ids)
            case_ids = all_case_ids
    return render_template('gene_list.html', gene_list=genelist_obj,
                           case_ids=case_ids)
4,900
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/server/blueprints/public/views.py#L79-L123
[ "def", "deactivate", "(", "profile", "=", "'default'", ")", ":", "with", "jconfig", "(", "profile", ")", "as", "config", ":", "deact", "=", "True", "if", "not", "getattr", "(", "config", ".", "NotebookApp", ".", "contents_manager_class", ",", "'startswith'", ",", "lambda", "x", ":", "False", ")", "(", "'jupyterdrive'", ")", ":", "deact", "=", "False", "if", "'gdrive'", "not", "in", "getattr", "(", "config", ".", "NotebookApp", ".", "tornado_settings", ",", "'get'", ",", "lambda", "_", ",", "__", ":", "''", ")", "(", "'contents_js_source'", ",", "''", ")", ":", "deact", "=", "False", "if", "deact", ":", "del", "config", "[", "'NotebookApp'", "]", "[", "'tornado_settings'", "]", "[", "'contents_js_source'", "]", "del", "config", "[", "'NotebookApp'", "]", "[", "'contents_manager_class'", "]" ]
Delete a whole gene list with links to cases or a link .
def delete_genelist(list_id, case_id=None):
    """Delete a whole gene list with its case links, or one single link.

    When ``case_id`` is given only that case is unlinked; otherwise the
    entire gene list is removed.
    """
    if not case_id:
        # remove the whole gene list
        app.db.remove_genelist(list_id)
        return redirect(url_for('.index'))
    # unlink just one case from the gene list
    linked_case = app.db.case(case_id)
    app.db.remove_genelist(list_id, case_obj=linked_case)
    return redirect(request.referrer)
4,901
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/server/blueprints/public/views.py#L128-L138
[ "def", "setOverlayTransformTrackedDeviceRelative", "(", "self", ",", "ulOverlayHandle", ",", "unTrackedDevice", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformTrackedDeviceRelative", "pmatTrackedDeviceToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "unTrackedDevice", ",", "byref", "(", "pmatTrackedDeviceToOverlayTransform", ")", ")", "return", "result", ",", "pmatTrackedDeviceToOverlayTransform" ]
Upload a new resource for an individual .
def resources():
    """Upload a new resource for an individual."""
    ind_id = request.form['ind_id']
    req_file = request.files['file']
    # sanitize the client-supplied filename before touching the filesystem
    safe_name = secure_filename(req_file.filename)
    destination = os.path.join(os.path.abspath(app.config['UPLOAD_DIR']),
                               safe_name)
    # fall back to the filename when no display name was provided
    resource_name = request.form['name'] or safe_name
    req_file.save(destination)
    ind_obj = app.db.individual(ind_id)
    app.db.add_resource(resource_name, destination, ind_obj)
    return redirect(request.referrer)
4,902
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/server/blueprints/public/views.py#L142-L155
[ "def", "volume_detach", "(", "self", ",", "name", ",", "timeout", "=", "300", ")", ":", "try", ":", "volume", "=", "self", ".", "volume_show", "(", "name", ")", "except", "KeyError", "as", "exc", ":", "raise", "SaltCloudSystemExit", "(", "'Unable to find {0} volume: {1}'", ".", "format", "(", "name", ",", "exc", ")", ")", "if", "not", "volume", "[", "'attachments'", "]", ":", "return", "True", "response", "=", "self", ".", "compute_conn", ".", "volumes", ".", "delete_server_volume", "(", "volume", "[", "'attachments'", "]", "[", "0", "]", "[", "'server_id'", "]", ",", "volume", "[", "'attachments'", "]", "[", "0", "]", "[", "'id'", "]", ")", "trycount", "=", "0", "start", "=", "time", ".", "time", "(", ")", "while", "True", ":", "trycount", "+=", "1", "try", ":", "response", "=", "self", ".", "_volume_get", "(", "volume", "[", "'id'", "]", ")", "if", "response", "[", "'status'", "]", "==", "'available'", ":", "return", "response", "except", "Exception", "as", "exc", ":", "log", ".", "debug", "(", "'Volume is detaching: %s'", ",", "name", ")", "time", ".", "sleep", "(", "1", ")", "if", "time", ".", "time", "(", ")", "-", "start", ">", "timeout", ":", "log", ".", "error", "(", "'Timed out after %d seconds '", "'while waiting for data'", ",", "timeout", ")", "return", "False", "log", ".", "debug", "(", "'Retrying volume_show() (try %d)'", ",", "trycount", ")" ]
Show a resource .
def resource(resource_id):
    """Show a resource, or serve its raw file when ``?raw`` is set."""
    resource_obj = app.db.resource(resource_id)
    if 'raw' not in request.args:
        return render_template('resource.html', resource=resource_obj)
    # serve the underlying file directly from disk
    directory, filename = os.path.split(resource_obj.path)
    return send_from_directory(directory, filename)
4,903
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/server/blueprints/public/views.py#L159-L167
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
Upload a new comment .
def comments(case_id):
    """Upload a new comment."""
    form = request.form
    case_obj = app.db.case(case_id)
    # variant_id / username are optional form fields
    app.db.add_comment(case_obj, form['text'],
                       variant_id=form.get('variant_id'),
                       username=form.get('username'))
    return redirect(request.referrer)
4,904
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/server/blueprints/public/views.py#L184-L191
[ "def", "list_vmss_skus", "(", "access_token", ",", "subscription_id", ",", "resource_group", ",", "vmss_name", ")", ":", "endpoint", "=", "''", ".", "join", "(", "[", "get_rm_endpoint", "(", ")", ",", "'/subscriptions/'", ",", "subscription_id", ",", "'/resourceGroups/'", ",", "resource_group", ",", "'/providers/Microsoft.Compute/virtualMachineScaleSets/'", ",", "vmss_name", ",", "'/skus'", ",", "'?api-version='", ",", "COMP_API", "]", ")", "return", "do_get_next", "(", "endpoint", ",", "access_token", ")" ]
Show details for a specific individual .
def individual(ind_id):
    """Show details for a specific individual."""
    return render_template('individual.html',
                           individual=app.db.individual(ind_id))
4,905
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/server/blueprints/public/views.py#L209-L212
[ "def", "detach_storage", "(", "self", ",", "server", ",", "address", ")", ":", "body", "=", "{", "'storage_device'", ":", "{", "'address'", ":", "address", "}", "}", "url", "=", "'/server/{0}/storage/detach'", ".", "format", "(", "server", ")", "res", "=", "self", ".", "post_request", "(", "url", ",", "body", ")", "return", "Storage", ".", "_create_storage_objs", "(", "res", "[", "'server'", "]", "[", "'storage_devices'", "]", ",", "cloud_manager", "=", "self", ")" ]
Update the case synopsis .
def synopsis(case_id):
    """Update the case synopsis from the submitted form text."""
    new_text = request.form['text']
    case_obj = app.db.case(case_id)
    app.db.update_synopsis(case_obj, new_text)
    return redirect(request.referrer)
4,906
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/server/blueprints/public/views.py#L216-L221
[ "def", "add_item", "(", "self", ",", "item_url", ",", "item_metadata", ")", ":", "c", "=", "self", ".", "conn", ".", "cursor", "(", ")", "c", ".", "execute", "(", "\"DELETE FROM items WHERE url=?\"", ",", "(", "str", "(", "item_url", ")", ",", ")", ")", "self", ".", "conn", ".", "commit", "(", ")", "c", ".", "execute", "(", "\"INSERT INTO items VALUES (?, ?, ?)\"", ",", "(", "str", "(", "item_url", ")", ",", "item_metadata", ",", "self", ".", "__now_iso_8601", "(", ")", ")", ")", "self", ".", "conn", ".", "commit", "(", ")", "c", ".", "close", "(", ")" ]
Make a new case out of a list of individuals .
def add_case():
    """Make a new case out of a list of individuals."""
    form = request.form
    ind_ids = form.getlist('ind_id')
    case_id = form['case_id']
    source = form['source']
    variant_type = form['type']
    if not ind_ids:
        return abort(400, "must add at least one member of case")
    # only GEMINI supported
    new_case = Case(case_id=case_id, name=case_id, variant_source=source,
                    variant_type=variant_type, variant_mode='gemini')
    # link each selected individual to the new case
    for ind_id in ind_ids:
        new_case.individuals.append(app.db.individual(ind_id))
    app.db.session.add(new_case)
    app.db.save()
    return redirect(url_for('.case', case_id=new_case.name))
4,907
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/server/blueprints/public/views.py#L225-L247
[ "def", "refresh_table_metadata", "(", "self", ",", "keyspace", ",", "table", ",", "max_schema_agreement_wait", "=", "None", ")", ":", "if", "not", "self", ".", "control_connection", ".", "refresh_schema", "(", "target_type", "=", "SchemaTargetType", ".", "TABLE", ",", "keyspace", "=", "keyspace", ",", "table", "=", "table", ",", "schema_agreement_wait", "=", "max_schema_agreement_wait", ",", "force", "=", "True", ")", ":", "raise", "DriverException", "(", "\"Table metadata was not refreshed. See log for details.\"", ")" ]
Dummy print sub .
def print_sub(tables):
    """Dummy sub that logs every write/update/delete event for ``tables``.

    :param tables: list or set of table names to subscribe to
    :raises ValueError: if ``tables`` is not a list or set
    """
    # validate before use: the original joined/logged the value first, which
    # produced a confusing TypeError instead of the intended ValueError
    if not isinstance(tables, (list, set)):
        raise ValueError("tables should be list or set")
    logger = logging.getLogger("meepo.sub.print_sub")
    logger.info("print_sub tables: %s", ", ".join(tables))
    for tb, action in itertools.product(tables, ("write", "update", "delete")):
        event = "%s_%s" % (tb, action)
        # BUGFIX: the original callback did `"%s -> %s" % event`, applying %
        # to a bare string (TypeError when the signal fired) and closed over
        # the loop variable (late binding: every callback saw the last
        # event). Bind `event` as a default argument and use lazy %-args.
        signal(event).connect(
            lambda pk, event=event: logger.info("%s -> %s", event, pk),
            weak=False)
4,908
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/sub/dummy.py#L11-L26
[ "def", "surface_based_cape_cin", "(", "pressure", ",", "temperature", ",", "dewpoint", ")", ":", "p", ",", "t", ",", "td", ",", "profile", "=", "parcel_profile_with_lcl", "(", "pressure", ",", "temperature", ",", "dewpoint", ")", "return", "cape_cin", "(", "p", ",", "t", ",", "td", ",", "profile", ")" ]
Decode Erlang terms within binary data into Python types
def binary_to_term(data):
    """Decode Erlang terms within binary data into Python types.

    :raises ParseException: for non-bytes input, bad version byte,
        truncated data, or trailing unparsed bytes.
    """
    if not isinstance(data, bytes):
        raise ParseException('not bytes input')
    size = len(data)
    if size <= 1:
        raise ParseException('null input')
    if b_ord(data[0]) != _TAG_VERSION:
        raise ParseException('invalid version')
    try:
        # parse starting after the version byte
        offset, term = _binary_to_term(1, data)
    except (struct.error, IndexError):
        # truncated input surfaces as either exception from the parser
        raise ParseException('missing data')
    if offset != size:
        raise ParseException('unparsed data')
    return term
4,909
https://github.com/okeuday/erlang_py/blob/81b7c2ace66b6bdee23602a6802efff541223fa3/erlang.py#L440-L459
[ "def", "calcLorenzDistance", "(", "self", ")", ":", "LorenzSim", "=", "np", ".", "mean", "(", "np", ".", "array", "(", "self", ".", "Lorenz_hist", ")", "[", "self", ".", "ignore_periods", ":", ",", ":", "]", ",", "axis", "=", "0", ")", "dist", "=", "np", ".", "sqrt", "(", "np", ".", "sum", "(", "(", "100", "*", "(", "LorenzSim", "-", "self", ".", "LorenzTarget", ")", ")", "**", "2", ")", ")", "self", ".", "LorenzDistance", "=", "dist", "return", "dist" ]
Encode Python types into Erlang terms in binary data
def term_to_binary(term, compressed=False):
    """Encode Python types into Erlang terms in binary data.

    :param compressed: False for no compression, True for zlib level 6,
        or an explicit zlib level 0..9
    :raises InputException: for a compression level outside [0..9]
    :raises OutputException: if the uncompressed payload exceeds uint32
    """
    payload = _term_to_binary(term)
    if compressed is False:
        return b_chr(_TAG_VERSION) + payload
    if compressed is True:
        compressed = 6  # default zlib level
    if compressed < 0 or compressed > 9:
        raise InputException('compressed in [0..9]')
    deflated = zlib.compress(payload, compressed)
    size_uncompressed = len(payload)
    if size_uncompressed > 4294967295:
        raise OutputException('uint32 overflow')
    # version byte + compressed tag + uncompressed size + deflated payload
    return (b_chr(_TAG_VERSION) + b_chr(_TAG_COMPRESSED_ZLIB) +
            struct.pack(b'>I', size_uncompressed) + deflated)
4,910
https://github.com/okeuday/erlang_py/blob/81b7c2ace66b6bdee23602a6802efff541223fa3/erlang.py#L461-L480
[ "def", "_parse_options", "(", "self", ",", "options", ")", ":", "attributes", "=", "(", "'host'", ",", "'wapi_version'", ",", "'username'", ",", "'password'", ",", "'ssl_verify'", ",", "'http_request_timeout'", ",", "'max_retries'", ",", "'http_pool_connections'", ",", "'http_pool_maxsize'", ",", "'silent_ssl_warnings'", ",", "'log_api_calls_as_info'", ",", "'max_results'", ",", "'paging'", ")", "for", "attr", "in", "attributes", ":", "if", "isinstance", "(", "options", ",", "dict", ")", "and", "attr", "in", "options", ":", "setattr", "(", "self", ",", "attr", ",", "options", "[", "attr", "]", ")", "elif", "hasattr", "(", "options", ",", "attr", ")", ":", "value", "=", "getattr", "(", "options", ",", "attr", ")", "setattr", "(", "self", ",", "attr", ",", "value", ")", "elif", "attr", "in", "self", ".", "DEFAULT_OPTIONS", ":", "setattr", "(", "self", ",", "attr", ",", "self", ".", "DEFAULT_OPTIONS", "[", "attr", "]", ")", "else", ":", "msg", "=", "\"WAPI config error. Option %s is not defined\"", "%", "attr", "raise", "ib_ex", ".", "InfobloxConfigException", "(", "msg", "=", "msg", ")", "for", "attr", "in", "(", "'host'", ",", "'username'", ",", "'password'", ")", ":", "if", "not", "getattr", "(", "self", ",", "attr", ")", ":", "msg", "=", "\"WAPI config error. Option %s can not be blank\"", "%", "attr", "raise", "ib_ex", ".", "InfobloxConfigException", "(", "msg", "=", "msg", ")", "self", ".", "wapi_url", "=", "\"https://%s/wapi/v%s/\"", "%", "(", "self", ".", "host", ",", "self", ".", "wapi_version", ")", "self", ".", "cloud_api_enabled", "=", "self", ".", "is_cloud_wapi", "(", "self", ".", "wapi_version", ")" ]
Parses a single line of text and returns an AudioClipSpec .
def _parseLine(cls, line):
    """Parse a single line of text and return an AudioClipSpec.

    :raises ValueError: when the line does not match the expected
        ``<number> <number> [<text>]`` format.
    """
    error_msg = ("Error: parsing '%s'. "
                 "Correct: \"<number> <number> [<text>]\"" % line)
    match = cls._PROG.match(line)
    if not match:
        raise ValueError(error_msg)
    groups = match.groupdict()
    # both timestamps must be non-empty
    if len(groups['begin']) == 0 or len(groups['end']) == 0:
        raise ValueError(error_msg)
    return AudioClipSpec(groups['begin'], groups['end'],
                         groups['text'].strip())
4,911
https://github.com/TiagoBras/audio-clip-extractor/blob/b0dd90266656dcbf7e663b3e174dce4d09e74c32/audioclipextractor/parser.py#L118-L136
[ "def", "_init_worker", "(", "X", ",", "X_shape", ",", "X_dtype", ")", ":", "# Using a dictionary is not strictly necessary. You can also", "# use global variables.", "mprotate_dict", "[", "\"X\"", "]", "=", "X", "mprotate_dict", "[", "\"X_shape\"", "]", "=", "X_shape", "mprotate_dict", "[", "\"X_dtype\"", "]", "=", "X_dtype" ]
Returns the tunnel mode's name for printing purposes .
def mode_name(self):
    """Return the tunnel mode's name for printing purposes.

    Looks up ``self.mode`` in the ``self.MODES`` mapping and returns the
    matching name, or None when no entry matches.
    """
    # .items() instead of the Python-2-only .iteritems() so this also runs
    # under Python 3 (identical behavior on Python 2); also avoid shadowing
    # the builtin `id`.
    for name, mode_id in self.MODES.items():
        if mode_id == self.mode:
            return name
    return None
4,912
https://github.com/Gawen/pytun/blob/a1e1f86a5e2b5ed256e3b87dcdd4f6aedc6cde6d/pytun.py#L106-L111
[ "def", "db_dict", "(", "c", ")", ":", "db_d", "=", "{", "}", "c", ".", "execute", "(", "'SELECT * FROM library_spectra'", ")", "db_d", "[", "'library_spectra'", "]", "=", "[", "list", "(", "row", ")", "for", "row", "in", "c", "]", "c", ".", "execute", "(", "'SELECT * FROM library_spectra_meta'", ")", "db_d", "[", "'library_spectra_meta'", "]", "=", "[", "list", "(", "row", ")", "for", "row", "in", "c", "]", "c", ".", "execute", "(", "'SELECT * FROM library_spectra_annotation'", ")", "db_d", "[", "'library_spectra_annotations'", "]", "=", "[", "list", "(", "row", ")", "for", "row", "in", "c", "]", "c", ".", "execute", "(", "'SELECT * FROM library_spectra_source'", ")", "db_d", "[", "'library_spectra_source'", "]", "=", "[", "list", "(", "row", ")", "for", "row", "in", "c", "]", "c", ".", "execute", "(", "'SELECT * FROM metab_compound'", ")", "db_d", "[", "'metab_compound'", "]", "=", "[", "list", "(", "row", ")", "for", "row", "in", "c", "]", "return", "db_d" ]
Create the tunnel . If the tunnel is already opened, the function will raise an AlreadyOpened exception .
def open(self):
    """Create the tunnel.

    If the tunnel is already opened the function raises AlreadyOpened.
    Raises NotPermitted when the TUNSETIFF ioctl fails with EPERM.
    """
    if self.fd is not None:
        raise self.AlreadyOpened()
    logger.debug("Opening %s..." % (TUN_KO_PATH,))
    self.fd = os.open(TUN_KO_PATH, os.O_RDWR)
    logger.debug("Opening %s tunnel '%s'..." % (self.mode_name.upper(),
                                                self.pattern,))
    try:
        ret = fcntl.ioctl(self.fd, self.TUNSETIFF,
                          struct.pack("16sH", self.pattern,
                                      self.mode | self.no_pi))
    # BUGFIX: `except IOError, e` is Python-2-only syntax and is a syntax
    # error on Python 3; `as e` works on Python 2.6+ and 3.
    except IOError as e:
        if e.errno == 1:  # EPERM
            logger.error("Cannot open a %s tunnel because the operation "
                         "is not permitted." % (self.mode_name.upper(),))
            raise self.NotPermitted()
        raise
    # the kernel may rewrite the requested interface name; keep the actual
    # name returned by the ioctl (first 16 bytes, NUL-padded)
    self.name = ret[:16].strip("\x00")
    logger.info("Tunnel '%s' opened." % (self.name,))
4,913
https://github.com/Gawen/pytun/blob/a1e1f86a5e2b5ed256e3b87dcdd4f6aedc6cde6d/pytun.py#L117-L142
[ "def", "_return_container_objects", "(", "self", ")", ":", "container_objects", "=", "self", ".", "job_args", ".", "get", "(", "'object'", ")", "if", "container_objects", ":", "return", "True", ",", "[", "{", "'container_object'", ":", "i", "}", "for", "i", "in", "container_objects", "]", "container_objects", "=", "self", ".", "job_args", ".", "get", "(", "'objects_file'", ")", "if", "container_objects", ":", "container_objects", "=", "os", ".", "path", ".", "expanduser", "(", "container_objects", ")", "if", "os", ".", "path", ".", "isfile", "(", "container_objects", ")", ":", "with", "open", "(", "container_objects", ")", "as", "f", ":", "return", "True", ",", "[", "{", "'container_object'", ":", "i", ".", "rstrip", "(", "'\\n'", ")", "}", "for", "i", "in", "f", ".", "readlines", "(", ")", "]", "container_objects", "=", "self", ".", "_list_contents", "(", ")", "pattern_match", "=", "self", ".", "job_args", ".", "get", "(", "'pattern_match'", ")", "if", "pattern_match", ":", "container_objects", "=", "self", ".", "match_filter", "(", "idx_list", "=", "container_objects", ",", "pattern", "=", "pattern_match", ",", "dict_type", "=", "True", ",", "dict_key", "=", "'name'", ")", "# Reformat list for processing", "if", "container_objects", "and", "isinstance", "(", "container_objects", "[", "0", "]", ",", "dict", ")", ":", "return", "False", ",", "self", ".", "_return_deque", "(", "[", "{", "'container_object'", ":", "i", "[", "'name'", "]", "}", "for", "i", "in", "container_objects", "]", ")", "else", ":", "return", "False", ",", "self", ".", "_return_deque", "(", ")" ]
Close the tunnel . If the tunnel is already closed or never opened do nothing .
def close(self):
    """Close the tunnel.

    Does nothing if the tunnel is already closed or was never opened.
    """
    if self.fd is None:
        return
    tunnel_name = self.name or ""
    logger.debug("Closing tunnel '%s'..." % (tunnel_name,))
    # release the tun.ko file descriptor and mark the tunnel closed
    os.close(self.fd)
    self.fd = None
    logger.info("Tunnel '%s' closed." % (tunnel_name,))
4,914
https://github.com/Gawen/pytun/blob/a1e1f86a5e2b5ed256e3b87dcdd4f6aedc6cde6d/pytun.py#L144-L159
[ "def", "_persist", "(", "source", ",", "path", ",", "component", "=", "None", ",", "storage_options", "=", "None", ",", "*", "*", "kwargs", ")", ":", "from", "dask", ".", "array", "import", "to_zarr", ",", "from_array", "from", ".", ".", "source", ".", "zarr", "import", "ZarrArraySource", "try", ":", "arr", "=", "source", ".", "to_dask", "(", ")", "except", "NotImplementedError", ":", "arr", "=", "from_array", "(", "source", ".", "read", "(", ")", ",", "chunks", "=", "-", "1", ")", ".", "rechunk", "(", "'auto'", ")", "to_zarr", "(", "arr", ",", "path", ",", "component", "=", "None", ",", "storage_options", "=", "storage_options", ",", "*", "*", "kwargs", ")", "source", "=", "ZarrArraySource", "(", "path", ",", "storage_options", ",", "component", ")", "return", "source" ]
Receive a buffer . The default size is 1500, the classical MTU .
def recv(self, size=None):
    """Receive a buffer from the tunnel.

    The default size is 1500, the classical MTU.
    """
    if size is None:
        size = 1500
    return os.read(self.fd, size)
4,915
https://github.com/Gawen/pytun/blob/a1e1f86a5e2b5ed256e3b87dcdd4f6aedc6cde6d/pytun.py#L165-L172
[ "def", "run", "(", "self", ")", ":", "logger", ".", "info", "(", "\"Initializing...\"", ")", "javabridge", ".", "call", "(", "self", ".", "jobject", ",", "\"initialize\"", ",", "\"()V\"", ")", "logger", ".", "info", "(", "\"Running...\"", ")", "javabridge", ".", "call", "(", "self", ".", "jobject", ",", "\"runExperiment\"", ",", "\"()V\"", ")", "logger", ".", "info", "(", "\"Finished...\"", ")", "javabridge", ".", "call", "(", "self", ".", "jobject", ",", "\"postProcess\"", ",", "\"()V\"", ")" ]
MLBAM dataset download
def download(self):
    """MLBAM dataset download.

    Downloads every day in ``self.days`` using a pool of worker processes.
    """
    # BUGFIX: the original created a Pool and never closed it, leaking
    # worker processes; close and join the pool when the work is done.
    pool = Pool()
    try:
        pool.map(self._download, self.days)
    finally:
        pool.close()
        pool.join()
4,916
https://github.com/Shinichi-Nakagawa/pitchpx/blob/5747402a0b3416f5e910b479e100df858f0b6440/pitchpx/mlbam.py#L49-L54
[ "def", "_restart_session", "(", "self", ",", "session", ")", ":", "# remove old session key, if socket is None, that means the", "# session was closed by user and there is no need to restart.", "if", "session", ".", "socket", "is", "not", "None", ":", "self", ".", "log", ".", "info", "(", "\"Attempting restart session for Monitor Id %s.\"", "%", "session", ".", "monitor_id", ")", "del", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "session", ".", "stop", "(", ")", "session", ".", "start", "(", ")", "self", ".", "sessions", "[", "session", ".", "socket", ".", "fileno", "(", ")", "]", "=", "session" ]
Count how many times each individual salt bridge occurred throughout the simulation . Returns numpy array .
def count_by_type(self):
    """Count how many times each individual salt bridge occurred
    throughout the simulation.

    Returns a numpy recarray with one record per distinct bridge and its
    frequency (occurrences / number of timesteps).
    """
    saltbridges = defaultdict(int)
    for contact in self.timeseries:
        # count by residue name, not by protein ring
        pkey = (contact.ligandatomid, contact.ligandatomname,
                contact.resid, contact.resname, contact.segid)
        saltbridges[pkey] += 1
    dtype = [("ligand_atom_id", int), ("ligand_atom_name", "|U4"),
             ("resid", int), ("resname", "|U4"), ("segid", "|U8"),
             ("frequency", float)]
    out = np.empty((len(saltbridges),), dtype=dtype)
    tsteps = float(len(self.timesteps))
    # BUGFIX: .items() instead of the Python-2-only .iteritems() so this
    # also runs under Python 3 (identical behavior on Python 2)
    for cursor, (key, count) in enumerate(saltbridges.items()):
        out[cursor] = key + (count / tsteps,)
    return out.view(np.recarray)
4,917
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/analysis/salt_bridges.py#L143-L156
[ "def", "AttachUserList", "(", "client", ",", "ad_group_id", ",", "user_list_id", ")", ":", "ad_group_criterion_service", "=", "client", ".", "GetService", "(", "'AdGroupCriterionService'", ",", "'v201809'", ")", "user_list", "=", "{", "'xsi_type'", ":", "'CriterionUserList'", ",", "'userListId'", ":", "user_list_id", "}", "ad_group_criterion", "=", "{", "'xsi_type'", ":", "'BiddableAdGroupCriterion'", ",", "'criterion'", ":", "user_list", ",", "'adGroupId'", ":", "ad_group_id", "}", "operations", "=", "[", "{", "'operator'", ":", "'ADD'", ",", "'operand'", ":", "ad_group_criterion", "}", "]", "return", "ad_group_criterion_service", ".", "mutate", "(", "operations", ")", "[", "'value'", "]", "[", "0", "]" ]
Count how many salt bridges occurred in each frame . Returns numpy array .
def count_by_time(self):
    """Count how many salt bridges occurred in each frame.

    Returns a numpy recarray of (time, count) records, one per timestep.
    """
    records = np.empty((len(self.timesteps),),
                       dtype=[('time', float), ('count', int)])
    for idx, frame_time in enumerate(self.timesteps):
        # number of recorded contacts belonging to this frame
        n_bridges = sum(1 for contact in self.timeseries
                        if contact.time == frame_time)
        records[idx] = (frame_time, n_bridges)
    return records.view(np.recarray)
4,918
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/analysis/salt_bridges.py#L158-L164
[ "def", "delete_item", "(", "self", ",", "item", ")", ":", "try", ":", "self", ".", "dynamodb_client", ".", "delete_item", "(", "*", "*", "item", ")", "except", "botocore", ".", "exceptions", ".", "ClientError", "as", "error", ":", "handle_constraint_violation", "(", "error", ")" ]
Keep longest field among head and update .
def keep_longest(head, update, down_path):
    """Keep the longest field among head and update.

    Returns 'f' to pick the first (head) value or 's' to pick the second
    (update) value; a None side always loses. ``down_path`` is part of the
    merger callback signature and is unused here.
    """
    if update is None:
        return 'f'
    if head is None:
        return 's'
    if len(head) >= len(update):
        return 'f'
    return 's'
4,919
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/config.py#L40-L48
[ "def", "handle_simulation_end", "(", "self", ",", "data_portal", ")", ":", "log", ".", "info", "(", "'Simulated {} trading days\\n'", "'first open: {}\\n'", "'last close: {}'", ",", "self", ".", "_session_count", ",", "self", ".", "_trading_calendar", ".", "session_open", "(", "self", ".", "_first_session", ")", ",", "self", ".", "_trading_calendar", ".", "session_close", "(", "self", ".", "_last_session", ")", ",", ")", "packet", "=", "{", "}", "self", ".", "end_of_simulation", "(", "packet", ",", "self", ".", "_ledger", ",", "self", ".", "_trading_calendar", ",", "self", ".", "_sessions", ",", "data_portal", ",", "self", ".", "_benchmark_source", ",", ")", "return", "packet" ]
Return comments for a case or variant .
def comments(self, case_id=None, variant_id=None, username=None):
    """Return comments for a case or variant.

    With only ``case_id``, variant-specific comments are excluded so that
    case-level comments are returned.
    """
    logger.debug("Looking for comments")
    comment_query = self.query(Comment)
    if case_id:
        comment_query = comment_query.filter_by(case_id=case_id)
    if variant_id:
        comment_query = comment_query.filter_by(variant_id=variant_id)
    elif case_id:
        # case-level comments only: drop the variant-specific ones
        comment_query = comment_query.filter_by(variant_id=None)
    return comment_query
4,920
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/sql/mixins/actions/comment.py#L10-L28
[ "def", "setOverlayTransformTrackedDeviceRelative", "(", "self", ",", "ulOverlayHandle", ",", "unTrackedDevice", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setOverlayTransformTrackedDeviceRelative", "pmatTrackedDeviceToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "unTrackedDevice", ",", "byref", "(", "pmatTrackedDeviceToOverlayTransform", ")", ")", "return", "result", ",", "pmatTrackedDeviceToOverlayTransform" ]
Add a comment to a variant or a case
def add_comment(self, case_obj, text, variant_id=None, username=None):
    """Add a comment to a variant or a case and persist it."""
    new_comment = Comment(
        text=text,
        username=(username or 'Anonymous'),
        case=case_obj,
        # md5 sum of chrom, pos, ref, alt
        variant_id=variant_id,
    )
    self.session.add(new_comment)
    self.save()
    return new_comment
4,921
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/sql/mixins/actions/comment.py#L38-L50
[ "def", "clean_registration_ids", "(", "self", ",", "registration_ids", "=", "[", "]", ")", ":", "valid_registration_ids", "=", "[", "]", "for", "registration_id", "in", "registration_ids", ":", "details", "=", "self", ".", "registration_info_request", "(", "registration_id", ")", "if", "details", ".", "status_code", "==", "200", ":", "valid_registration_ids", ".", "append", "(", "registration_id", ")", "return", "valid_registration_ids" ]
Add the consequences found for a variant
def _add_consequences(self, variant_obj, raw_variant_line):
    """Add the consequences found for a variant.

    Stores every SO term that appears as a substring of the raw VCF line
    on ``variant_obj.consequences``.
    """
    variant_obj.consequences = [term for term in SO_TERMS
                                if term in raw_variant_line]
4,922
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/vcf/mixins/variant_extras/consequences.py#L10-L22
[ "def", "_post_request", "(", "self", ",", "url", ",", "headers", ",", "data", "=", "None", ")", ":", "# Grab file from data.", "files", "=", "None", "for", "field", ",", "value", "in", "data", ":", "if", "field", "==", "'file'", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "files", "=", "value", "else", ":", "files", "=", "{", "'file'", ":", "value", "}", "break", "# Remove file entry from data.", "data", "[", ":", "]", "=", "[", "tup", "for", "tup", "in", "data", "if", "tup", "[", "0", "]", "!=", "'file'", "]", "return", "self", ".", "_session", ".", "post", "(", "url", ",", "headers", "=", "headers", ",", "data", "=", "data", ",", "files", "=", "files", ")" ]
colleziona elementi per le liste .
def collect_appendvars(ap_, cls):
    """Collect elements for the append-var lists.

    Scans ``cls`` for ``appendvars_<name>`` attributes and extends the
    matching ``ap_.appendvars[<name>]`` list with any values not already
    present.
    """
    prefix = 'appendvars_'
    for attr, value in cls.__dict__.items():
        if not attr.startswith(prefix):
            continue
        varname = attr[len(prefix):]
        bucket = ap_.appendvars.setdefault(varname, [])
        if value not in bucket:
            # normalize a scalar into a one-element list before extending
            if not isinstance(value, list):
                value = [value]
            ap_.appendvars[varname] += value
4,923
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/utils.py#L88-L100
[ "def", "run_clients", "(", ")", ":", "clients", "=", "request", ".", "form", ".", "get", "(", "'clients'", ")", "if", "not", "clients", ":", "return", "jsonify", "(", "{", "'Error'", ":", "'no clients provided'", "}", ")", "result", "=", "{", "}", "for", "client_id", "in", "clients", ".", "split", "(", "','", ")", ":", "if", "client_id", "not", "in", "drivers", ":", "init_client", "(", "client_id", ")", "init_timer", "(", "client_id", ")", "result", "[", "client_id", "]", "=", "get_client_info", "(", "client_id", ")", "return", "jsonify", "(", "result", ")" ]
Verifica se ci sono shared .
def has_shared(arg, shared):
    """Check whether ``arg`` appears among the shared arguments.

    ``shared`` is either a list of (args, kwargs) specs or an object with
    a ``__shared_arguments__`` attribute. Returns the index of the match,
    or False when not found / the spec is malformed. Note that index 0 is
    falsy, so callers compare with ``is not False``.
    """
    try:
        if isinstance(shared, list):
            spec_list = shared
        else:
            spec_list = shared.__shared_arguments__
        for position, (spec_args, spec_kwargs) in enumerate(spec_list):
            # dest wins; otherwise derive the name from the last flag
            candidate = spec_kwargs.get(
                'dest', spec_args[-1].lstrip('-').replace('-', '_'))
            if candidate == arg:
                return position
        return False
    except (ValueError, AttributeError):
        return False
4,924
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/utils.py#L136-L153
[ "def", "_create_download_failed_message", "(", "exception", ",", "url", ")", ":", "message", "=", "'Failed to download from:\\n{}\\nwith {}:\\n{}'", ".", "format", "(", "url", ",", "exception", ".", "__class__", ".", "__name__", ",", "exception", ")", "if", "_is_temporal_problem", "(", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "requests", ".", "ConnectionError", ")", ":", "message", "+=", "'\\nPlease check your internet connection and try again.'", "else", ":", "message", "+=", "'\\nThere might be a problem in connection or the server failed to process '", "'your request. Please try again.'", "elif", "isinstance", "(", "exception", ",", "requests", ".", "HTTPError", ")", ":", "try", ":", "server_message", "=", "''", "for", "elem", "in", "decode_data", "(", "exception", ".", "response", ".", "content", ",", "MimeType", ".", "XML", ")", ":", "if", "'ServiceException'", "in", "elem", ".", "tag", "or", "'Message'", "in", "elem", ".", "tag", ":", "server_message", "+=", "elem", ".", "text", ".", "strip", "(", "'\\n\\t '", ")", "except", "ElementTree", ".", "ParseError", ":", "server_message", "=", "exception", ".", "response", ".", "text", "message", "+=", "'\\nServer response: \"{}\"'", ".", "format", "(", "server_message", ")", "return", "message" ]
Verifica se ci sono argument con la classe .
def has_argument ( arg , arguments ) : try : if not isinstance ( arguments , list ) : arguments = arguments . __arguments__ for idx , ( args , kwargs ) in enumerate ( arguments ) : arg_name = kwargs . get ( 'dest' , args [ - 1 ] . lstrip ( '-' ) . replace ( '-' , '_' ) ) if arg_name == arg : return idx idx = False except ( ValueError , AttributeError ) : idx = False return idx
4,925
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/utils.py#L156-L171
[ "def", "parse_registries", "(", "filesystem", ",", "registries", ")", ":", "results", "=", "{", "}", "for", "path", "in", "registries", ":", "with", "NamedTemporaryFile", "(", "buffering", "=", "0", ")", "as", "tempfile", ":", "filesystem", ".", "download", "(", "path", ",", "tempfile", ".", "name", ")", "registry", "=", "RegistryHive", "(", "tempfile", ".", "name", ")", "registry", ".", "rootkey", "=", "registry_root", "(", "path", ")", "results", ".", "update", "(", "{", "k", ".", "path", ":", "(", "k", ".", "timestamp", ",", "k", ".", "values", ")", "for", "k", "in", "registry", ".", "keys", "(", ")", "}", ")", "return", "results" ]
Recupera gli argomenti dalla funzione stessa .
def get_functarguments ( func ) : argspec = inspect . getargspec ( func ) if argspec . defaults is not None : args = argspec . args [ : - len ( argspec . defaults ) ] kwargs = dict ( zip ( argspec . args [ - len ( argspec . defaults ) : ] , argspec . defaults ) ) else : args = argspec . args kwargs = { } if args and args [ 0 ] == 'self' : args . pop ( 0 ) func . __named__ = [ ] arguments = [ ] shared = get_shared ( func ) for arg in args : if has_shared ( arg , shared ) is not False : continue if has_argument ( arg , func . __cls__ ) is not False : continue arguments . append ( ( [ arg ] , { } , ) ) func . __named__ . append ( arg ) for key , val in kwargs . items ( ) : if has_shared ( key , shared ) is not False : continue if has_argument ( key , func . __cls__ ) is not False : continue if isinstance ( val , dict ) : flags = [ val . pop ( 'lflag' , '--%s' % key ) ] short = val . pop ( 'flag' , None ) dest = val . get ( 'dest' , key ) . replace ( '-' , '_' ) if short : flags . insert ( 0 , short ) else : flags = [ '--%s' % key ] val = dict ( default = val ) dest = key . replace ( '-' , '_' ) func . __named__ . append ( dest ) arguments . append ( ( flags , val , ) ) return arguments
4,926
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/utils.py#L174-L215
[ "def", "delete_downloads", "(", ")", ":", "shutil", ".", "rmtree", "(", "vtki", ".", "EXAMPLES_PATH", ")", "os", ".", "makedirs", "(", "vtki", ".", "EXAMPLES_PATH", ")", "return", "True" ]
Imposta il parser .
def get_parser ( func , parent ) : parser = parent . add_parser ( func . __cmd_name__ , help = func . __doc__ ) for args , kwargs in func . __arguments__ : parser . add_argument ( * args , * * kwargs ) return parser
4,927
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/utils.py#L218-L225
[ "def", "note_on", "(", "self", ",", "channel", ",", "note", ",", "velocity", ")", ":", "return", "self", ".", "midi_event", "(", "NOTE_ON", ",", "channel", ",", "note", ",", "velocity", ")" ]
return shared .
def get_shared ( func ) : shared = [ ] if not hasattr ( func , '__cls__' ) : return shared if not hasattr ( func . __cls__ , '__shared_arguments__' ) : return shared if hasattr ( func , '__no_share__' ) : if func . __no_share__ is True : return shared else : shared += [ s for s in func . __cls__ . __shared_arguments__ if ( s [ 0 ] [ - 1 ] . replace ( '--' , '' ) . replace ( '-' , '_' ) ) not in func . __no_share__ ] else : shared = func . __cls__ . __shared_arguments__ return shared
4,928
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/utils.py#L228-L247
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Set subcommands .
def set_subcommands ( func , parser ) : if hasattr ( func , '__subcommands__' ) and func . __subcommands__ : sub_parser = parser . add_subparsers ( title = SUBCOMMANDS_LIST_TITLE , dest = 'subcommand' , description = SUBCOMMANDS_LIST_DESCRIPTION . format ( func . __cmd_name__ ) , help = func . __doc__ ) for sub_func in func . __subcommands__ . values ( ) : parser = get_parser ( sub_func , sub_parser ) for args , kwargs in get_shared ( sub_func ) : parser . add_argument ( * args , * * kwargs ) else : for args , kwargs in get_shared ( func ) : parser . add_argument ( * args , * * kwargs )
4,929
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/utils.py#L250-L266
[ "def", "create_api_call", "(", "func", ",", "settings", ")", ":", "def", "base_caller", "(", "api_call", ",", "_", ",", "*", "args", ")", ":", "\"\"\"Simply call api_call and ignore settings.\"\"\"", "return", "api_call", "(", "*", "args", ")", "def", "inner", "(", "request", ",", "options", "=", "None", ")", ":", "\"\"\"Invoke with the actual settings.\"\"\"", "this_options", "=", "_merge_options_metadata", "(", "options", ",", "settings", ")", "this_settings", "=", "settings", ".", "merge", "(", "this_options", ")", "if", "this_settings", ".", "retry", "and", "this_settings", ".", "retry", ".", "retry_codes", ":", "api_call", "=", "gax", ".", "retry", ".", "retryable", "(", "func", ",", "this_settings", ".", "retry", ",", "*", "*", "this_settings", ".", "kwargs", ")", "else", ":", "api_call", "=", "gax", ".", "retry", ".", "add_timeout_arg", "(", "func", ",", "this_settings", ".", "timeout", ",", "*", "*", "this_settings", ".", "kwargs", ")", "api_call", "=", "_catch_errors", "(", "api_call", ",", "gax", ".", "config", ".", "API_ERRORS", ")", "return", "api_caller", "(", "api_call", ",", "this_settings", ",", "request", ")", "if", "settings", ".", "page_descriptor", ":", "if", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "raise", "ValueError", "(", "'The API call has incompatible settings: '", "'bundling and page streaming'", ")", "api_caller", "=", "_page_streamable", "(", "settings", ".", "page_descriptor", ")", "elif", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "api_caller", "=", "_bundleable", "(", "settings", ".", "bundle_descriptor", ")", "else", ":", "api_caller", "=", "base_caller", "return", "inner" ]
check know args in argv .
def check_help ( ) : # know arguments know = set ( ( '-h' , '--help' , '-v' , '--version' ) ) # arguments args = set ( sys . argv [ 1 : ] ) # returns True if there is at least one known argument in arguments return len ( know . intersection ( args ) ) > 0
4,930
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/utils.py#L269-L278
[ "def", "_configure_manager", "(", "self", ")", ":", "self", ".", "_manager", "=", "CloudBlockStorageManager", "(", "self", ",", "resource_class", "=", "CloudBlockStorageVolume", ",", "response_key", "=", "\"volume\"", ",", "uri_base", "=", "\"volumes\"", ")", "self", ".", "_types_manager", "=", "BaseManager", "(", "self", ",", "resource_class", "=", "CloudBlockStorageVolumeType", ",", "response_key", "=", "\"volume_type\"", ",", "uri_base", "=", "\"types\"", ")", "self", ".", "_snapshot_manager", "=", "CloudBlockStorageSnapshotManager", "(", "self", ",", "resource_class", "=", "CloudBlockStorageSnapshot", ",", "response_key", "=", "\"snapshot\"", ",", "uri_base", "=", "\"snapshots\"", ")" ]
The classes and function that deal with protein - ligand interaction analysis .
def analysis_of_prot_lig_interactions ( self ) : self . hbonds = HBonds ( self . topol_data , self . trajectory , self . start , self . end , self . skip , self . analysis_cutoff , distance = 3 ) self . pistacking = PiStacking ( self . topol_data , self . trajectory , self . start , self . end , self . skip , self . analysis_cutoff ) self . sasa = SASA ( self . topol_data , self . trajectory ) self . lig_descr = LigDescr ( self . topol_data ) if self . trajectory != [ ] : self . rmsf = RMSF_measurements ( self . topol_data , self . topology , self . trajectory , self . ligand , self . start , self . end , self . skip ) self . salt_bridges = SaltBridges ( self . topol_data , self . trajectory , self . lig_descr , self . start , self . end , self . skip , self . analysis_cutoff )
4,931
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/lintools.py#L87-L97
[ "def", "get_registered_configs", "(", "self", ",", "instances", "=", "None", ")", ":", "configs", "=", "self", ".", "state", ".", "get", "(", "'config_files'", ",", "{", "}", ")", "if", "instances", "is", "not", "None", ":", "for", "config_file", ",", "config", "in", "configs", ".", "items", "(", ")", ":", "if", "config", "[", "'instance_name'", "]", "not", "in", "instances", ":", "configs", ".", "pop", "(", "config_file", ")", "return", "configs" ]
Saves all output from LINTools run in a single directory named after the output name .
def save_files ( self ) : while True : try : os . mkdir ( self . output_name ) except Exception as e : self . output_name = raw_input ( "This directory already exists - please enter a new name:" ) else : break self . workdir = os . getcwd ( ) os . chdir ( self . workdir + "/" + self . output_name )
4,932
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/lintools.py#L120-L130
[ "def", "exclude_types", "(", "self", ",", "*", "objs", ")", ":", "for", "o", "in", "objs", ":", "for", "t", "in", "_keytuple", "(", "o", ")", ":", "if", "t", "and", "t", "not", "in", "self", ".", "_excl_d", ":", "self", ".", "_excl_d", "[", "t", "]", "=", "0" ]
Removes intermediate files .
def remove_files ( self ) : file_list = [ "molecule.svg" , "lig.pdb" , "HIS.pdb" , "PHE.pdb" , "TRP.pdb" , "TYR.pdb" , "lig.mol" , "test.xtc" ] for residue in self . topol_data . dict_of_plotted_res . keys ( ) : file_list . append ( residue [ 1 ] + residue [ 2 ] + ".svg" ) for f in file_list : if os . path . isfile ( f ) == True : os . remove ( f )
4,933
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/lintools.py#L162-L169
[ "def", "get_base_url", "(", "self", ",", "request", ")", ":", "url", "=", "self", ".", "base_url", "+", "request", ".", "service_type", ".", "value", "# These 2 lines are temporal and will be removed after the use of uswest url wont be required anymore:", "if", "hasattr", "(", "request", ",", "'data_source'", ")", "and", "request", ".", "data_source", ".", "is_uswest_source", "(", ")", ":", "url", "=", "'https://services-uswest2.sentinel-hub.com/ogc/{}'", ".", "format", "(", "request", ".", "service_type", ".", "value", ")", "if", "hasattr", "(", "request", ",", "'data_source'", ")", "and", "request", ".", "data_source", "not", "in", "DataSource", ".", "get_available_sources", "(", ")", ":", "raise", "ValueError", "(", "\"{} is not available for service at ogc_base_url={}\"", ".", "format", "(", "request", ".", "data_source", ",", "SHConfig", "(", ")", ".", "ogc_base_url", ")", ")", "return", "url" ]
Initialize the driver by setting up GPIO interrupts and periodic statistics processing .
def setup ( self ) : # Initialize the statistics variables. self . radiation_count = 0 self . noise_count = 0 self . count = 0 # Initialize count_history[]. self . count_history = [ 0 ] * HISTORY_LENGTH self . history_index = 0 # Init measurement time. self . previous_time = millis ( ) self . previous_history_time = millis ( ) self . duration = 0 # Init the GPIO context. GPIO . setup ( self . radiation_pin , GPIO . IN , pull_up_down = GPIO . PUD_UP ) GPIO . setup ( self . noise_pin , GPIO . IN , pull_up_down = GPIO . PUD_UP ) # Register local callbacks. GPIO . add_event_detect ( self . radiation_pin , GPIO . FALLING , callback = self . _on_radiation ) GPIO . add_event_detect ( self . noise_pin , GPIO . FALLING , callback = self . _on_noise ) # Enable the timer for processing the statistics periodically. self . _enable_timer ( ) return self
4,934
https://github.com/MonsieurV/PiPocketGeiger/blob/b0e7c303df46deeea3715fb8da3ebbefaf660f91/PiPocketGeiger/__init__.py#L91-L115
[ "def", "get_field_names", "(", "self", ")", ":", "col_model", "=", "self", ".", "request", ".", "get", "(", "\"colModel\"", ",", "None", ")", "if", "not", "col_model", ":", "return", "[", "\"UID\"", ",", "]", "names", "=", "[", "]", "col_model", "=", "json", ".", "loads", "(", "_u", "(", "col_model", ")", ")", "if", "isinstance", "(", "col_model", ",", "(", "list", ",", "tuple", ")", ")", ":", "names", "=", "map", "(", "lambda", "c", ":", "c", ".", "get", "(", "\"columnName\"", ",", "\"\"", ")", ".", "strip", "(", ")", ",", "col_model", ")", "# UID is used by reference widget to know the object that the user", "# selected from the popup list", "if", "\"UID\"", "not", "in", "names", ":", "names", ".", "append", "(", "\"UID\"", ")", "return", "filter", "(", "None", ",", "names", ")" ]
Load a variant source into the database .
def load ( ctx , variant_source , family_file , family_type , root ) : root = root or ctx . obj . get ( 'root' ) or os . path . expanduser ( "~/.puzzle" ) if os . path . isfile ( root ) : logger . error ( "'root' can't be a file" ) ctx . abort ( ) logger . info ( "Root directory is: {}" . format ( root ) ) db_path = os . path . join ( root , 'puzzle_db.sqlite3' ) logger . info ( "db path is: {}" . format ( db_path ) ) if not os . path . exists ( db_path ) : logger . warn ( "database not initialized, run 'puzzle init'" ) ctx . abort ( ) if not os . path . isfile ( variant_source ) : logger . error ( "Variant source has to be a file" ) ctx . abort ( ) mode = get_file_type ( variant_source ) if mode == 'unknown' : logger . error ( "Unknown file type" ) ctx . abort ( ) #Test if gemini is installed elif mode == 'gemini' : logger . debug ( "Initialzing GEMINI plugin" ) if not GEMINI : logger . error ( "Need to have gemini installed to use gemini plugin" ) ctx . abort ( ) logger . debug ( 'Set puzzle backend to {0}' . format ( mode ) ) variant_type = get_variant_type ( variant_source ) logger . debug ( 'Set variant type to {0}' . format ( variant_type ) ) cases = get_cases ( variant_source = variant_source , case_lines = family_file , case_type = family_type , variant_type = variant_type , variant_mode = mode ) if len ( cases ) == 0 : logger . warning ( "No cases found" ) ctx . abort ( ) logger . info ( "Initializing sqlite plugin" ) store = SqlStore ( db_path ) for case_obj in cases : if store . case ( case_obj . case_id ) is not None : logger . warn ( "{} already exists in the database" . format ( case_obj . case_id ) ) continue # extract case information logger . debug ( "adding case: {} to puzzle db" . format ( case_obj . case_id ) ) store . add_case ( case_obj , vtype = variant_type , mode = mode )
4,935
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/cli/load.py#L30-L97
[ "def", "_connect", "(", "self", ")", ":", "try", ":", "# Open Connection", "self", ".", "influx", "=", "InfluxDBClient", "(", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "username", ",", "self", ".", "password", ",", "self", ".", "database", ",", "self", ".", "ssl", ")", "# Log", "self", ".", "log", ".", "debug", "(", "\"InfluxdbHandler: Established connection to \"", "\"%s:%d/%s.\"", ",", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "database", ")", "except", "Exception", "as", "ex", ":", "# Log Error", "self", ".", "_throttle_error", "(", "\"InfluxdbHandler: Failed to connect to \"", "\"%s:%d/%s. %s\"", ",", "self", ".", "hostname", ",", "self", ".", "port", ",", "self", ".", "database", ",", "ex", ")", "# Close Socket", "self", ".", "_close", "(", ")", "return" ]
Retrieve deal s associated contacts
def list ( self , deal_id , * * params ) : _ , _ , associated_contacts = self . http_client . get ( "/deals/{deal_id}/associated_contacts" . format ( deal_id = deal_id ) , params = params ) return associated_contacts
4,936
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L65-L79
[ "def", "check_video", "(", "file_name", ",", "mediainfo_path", "=", "None", ")", ":", "D", "=", "call_MediaInfo", "(", "file_name", ",", "mediainfo_path", ")", "err_msg", "=", "\"Could not determine all video paramters\"", "if", "(", "\"General\"", "not", "in", "D", ")", "or", "(", "\"Video\"", "not", "in", "D", ")", ":", "raise", "MediaInfoError", "(", "err_msg", ")", "general_keys", "=", "(", "\"Count of audio streams\"", ",", "\"File size\"", ",", "\"Overall bit rate\"", ")", "if", "any", "(", "k", "not", "in", "D", "[", "\"General\"", "]", "for", "k", "in", "general_keys", ")", ":", "raise", "MediaInfoError", "(", "err_msg", ")", "video_keys", "=", "(", "\"Format profile\"", ",", "\"Commercial name\"", ",", "\"Frame rate\"", ",", "\"Height\"", ",", "\"Scan type\"", ",", ")", "if", "any", "(", "k", "not", "in", "D", "[", "\"Video\"", "]", "for", "k", "in", "video_keys", ")", ":", "raise", "MediaInfoError", "(", "err_msg", ")", "return", "D" ]
Create an associated contact
def create ( self , deal_id , * args , * * kwargs ) : if not args and not kwargs : raise Exception ( 'attributes for AssociatedContact are missing' ) attributes = args [ 0 ] if args else kwargs attributes = dict ( ( k , v ) for k , v in attributes . iteritems ( ) if k in self . OPTS_KEYS_TO_PERSIST ) _ , _ , associated_contact = self . http_client . post ( "/deals/{deal_id}/associated_contacts" . format ( deal_id = deal_id ) , body = attributes ) return associated_contact
4,937
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L81-L103
[ "def", "build_request", "(", "headers", ":", "Headers", ")", "->", "str", ":", "raw_key", "=", "bytes", "(", "random", ".", "getrandbits", "(", "8", ")", "for", "_", "in", "range", "(", "16", ")", ")", "key", "=", "base64", ".", "b64encode", "(", "raw_key", ")", ".", "decode", "(", ")", "headers", "[", "\"Upgrade\"", "]", "=", "\"websocket\"", "headers", "[", "\"Connection\"", "]", "=", "\"Upgrade\"", "headers", "[", "\"Sec-WebSocket-Key\"", "]", "=", "key", "headers", "[", "\"Sec-WebSocket-Version\"", "]", "=", "\"13\"", "return", "key" ]
Remove an associated contact
def destroy ( self , deal_id , contact_id ) : status_code , _ , _ = self . http_client . delete ( "/deals/{deal_id}/associated_contacts/{contact_id}" . format ( deal_id = deal_id , contact_id = contact_id ) ) return status_code == 204
4,938
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L105-L121
[ "def", "build_request", "(", "headers", ":", "Headers", ")", "->", "str", ":", "raw_key", "=", "bytes", "(", "random", ".", "getrandbits", "(", "8", ")", "for", "_", "in", "range", "(", "16", ")", ")", "key", "=", "base64", ".", "b64encode", "(", "raw_key", ")", ".", "decode", "(", ")", "headers", "[", "\"Upgrade\"", "]", "=", "\"websocket\"", "headers", "[", "\"Connection\"", "]", "=", "\"Upgrade\"", "headers", "[", "\"Sec-WebSocket-Key\"", "]", "=", "key", "headers", "[", "\"Sec-WebSocket-Version\"", "]", "=", "\"13\"", "return", "key" ]
Retrieve all contacts
def list ( self , * * params ) : _ , _ , contacts = self . http_client . get ( "/contacts" , params = params ) return contacts
4,939
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L148-L161
[ "def", "vn_release", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "call", "!=", "'function'", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_reserve function must be called with -f or --function.'", ")", "if", "kwargs", "is", "None", ":", "kwargs", "=", "{", "}", "vn_id", "=", "kwargs", ".", "get", "(", "'vn_id'", ",", "None", ")", "vn_name", "=", "kwargs", ".", "get", "(", "'vn_name'", ",", "None", ")", "path", "=", "kwargs", ".", "get", "(", "'path'", ",", "None", ")", "data", "=", "kwargs", ".", "get", "(", "'data'", ",", "None", ")", "if", "vn_id", ":", "if", "vn_name", ":", "log", ".", "warning", "(", "'Both the \\'vn_id\\' and \\'vn_name\\' arguments were provided. '", "'\\'vn_id\\' will take precedence.'", ")", "elif", "vn_name", ":", "vn_id", "=", "get_vn_id", "(", "kwargs", "=", "{", "'name'", ":", "vn_name", "}", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_release function requires a \\'vn_id\\' or a \\'vn_name\\' to '", "'be provided.'", ")", "if", "data", ":", "if", "path", ":", "log", ".", "warning", "(", "'Both the \\'data\\' and \\'path\\' arguments were provided. 
'", "'\\'data\\' will take precedence.'", ")", "elif", "path", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "path", ",", "mode", "=", "'r'", ")", "as", "rfh", ":", "data", "=", "rfh", ".", "read", "(", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_release function requires either \\'data\\' or a \\'path\\' to '", "'be provided.'", ")", "server", ",", "user", ",", "password", "=", "_get_xml_rpc", "(", ")", "auth", "=", "':'", ".", "join", "(", "[", "user", ",", "password", "]", ")", "response", "=", "server", ".", "one", ".", "vn", ".", "release", "(", "auth", ",", "int", "(", "vn_id", ")", ",", "data", ")", "ret", "=", "{", "'action'", ":", "'vn.release'", ",", "'released'", ":", "response", "[", "0", "]", ",", "'resource_id'", ":", "response", "[", "1", "]", ",", "'error_code'", ":", "response", "[", "2", "]", ",", "}", "return", "ret" ]
Retrieve a single contact
def retrieve ( self , id ) : _ , _ , contact = self . http_client . get ( "/contacts/{id}" . format ( id = id ) ) return contact
4,940
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L186-L200
[ "def", "command", "(", "state", ",", "args", ")", ":", "args", "=", "parser", ".", "parse_args", "(", "args", "[", "1", ":", "]", ")", "where_queries", "=", "[", "]", "params", "=", "{", "}", "if", "args", ".", "watching", "or", "args", ".", "available", ":", "where_queries", ".", "append", "(", "'regexp IS NOT NULL'", ")", "if", "args", ".", "query", ":", "where_queries", ".", "append", "(", "'title LIKE :title'", ")", "params", "[", "'title'", "]", "=", "_compile_sql_query", "(", "args", ".", "query", ")", "if", "not", "where_queries", ":", "print", "(", "'Must include at least one filter.'", ")", "return", "where_query", "=", "' AND '", ".", "join", "(", "where_queries", ")", "logger", ".", "debug", "(", "'Search where %s with params %s'", ",", "where_query", ",", "params", ")", "results", "=", "list", "(", ")", "all_files", "=", "[", "filename", "for", "filename", "in", "_find_files", "(", "state", ".", "config", "[", "'anime'", "]", ".", "getpath", "(", "'watchdir'", ")", ")", "if", "_is_video", "(", "filename", ")", "]", "for", "anime", "in", "query", ".", "select", ".", "select", "(", "state", ".", "db", ",", "where_query", ",", "params", ")", ":", "logger", ".", "debug", "(", "'For anime %s with regexp %s'", ",", "anime", ".", "aid", ",", "anime", ".", "regexp", ")", "if", "anime", ".", "regexp", "is", "not", "None", ":", "anime_files", "=", "AnimeFiles", "(", "anime", ".", "regexp", ",", "all_files", ")", "logger", ".", "debug", "(", "'Found files %s'", ",", "anime_files", ".", "filenames", ")", "query", ".", "files", ".", "cache_files", "(", "state", ".", "db", ",", "anime", ".", "aid", ",", "anime_files", ")", "available", "=", "anime_files", ".", "available_string", "(", "anime", ".", "watched_episodes", ")", "else", ":", "available", "=", "''", "if", "not", "args", ".", "available", "or", "available", ":", "results", ".", "append", "(", "(", "anime", ".", "aid", ",", "anime", ".", "title", ",", "anime", ".", "type", ",", "'{}/{}'", ".", "format", 
"(", "anime", ".", "watched_episodes", ",", "anime", ".", "episodecount", ")", ",", "'yes'", "if", "anime", ".", "complete", "else", "''", ",", "available", ",", ")", ")", "state", ".", "results", "[", "'db'", "]", ".", "set", "(", "results", ")", "state", ".", "results", "[", "'db'", "]", ".", "print", "(", ")" ]
Retrieve all deals
def list ( self , * * params ) : _ , _ , deals = self . http_client . get ( "/deals" , params = params ) for deal in deals : deal [ 'value' ] = Coercion . to_decimal ( deal [ 'value' ] ) return deals
4,941
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L270-L286
[ "def", "_bind_topics", "(", "self", ",", "topics", ")", ":", "# FIXME: Allow for these subscriptions to fail and clean up the previous ones", "# so that this function is atomic", "self", ".", "client", ".", "subscribe", "(", "topics", ".", "status", ",", "self", ".", "_on_status_message", ")", "self", ".", "client", ".", "subscribe", "(", "topics", ".", "tracing", ",", "self", ".", "_on_trace", ")", "self", ".", "client", ".", "subscribe", "(", "topics", ".", "streaming", ",", "self", ".", "_on_report", ")", "self", ".", "client", ".", "subscribe", "(", "topics", ".", "response", ",", "self", ".", "_on_response_message", ")" ]
Create a deal
def create ( self , * args , * * kwargs ) : if not args and not kwargs : raise Exception ( 'attributes for Deal are missing' ) attributes = args [ 0 ] if args else kwargs attributes = dict ( ( k , v ) for k , v in attributes . iteritems ( ) if k in self . OPTS_KEYS_TO_PERSIST ) if "value" in attributes : attributes [ "value" ] = Coercion . to_string ( attributes [ "value" ] ) _ , _ , deal = self . http_client . post ( "/deals" , body = attributes ) deal [ "value" ] = Coercion . to_decimal ( deal [ "value" ] ) return deal
4,942
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L288-L311
[ "def", "_load_cache", "(", "self", ")", ":", "# If the cached file exist, read-it", "max_refresh_date", "=", "timedelta", "(", "days", "=", "7", ")", "cached_data", "=", "{", "}", "try", ":", "with", "open", "(", "self", ".", "cache_file", ",", "'rb'", ")", "as", "f", ":", "cached_data", "=", "pickle", ".", "load", "(", "f", ")", "except", "Exception", "as", "e", ":", "logger", ".", "debug", "(", "\"Cannot read version from cache file: {} ({})\"", ".", "format", "(", "self", ".", "cache_file", ",", "e", ")", ")", "else", ":", "logger", ".", "debug", "(", "\"Read version from cache file\"", ")", "if", "(", "cached_data", "[", "'installed_version'", "]", "!=", "self", ".", "installed_version", "(", ")", "or", "datetime", ".", "now", "(", ")", "-", "cached_data", "[", "'refresh_date'", "]", ">", "max_refresh_date", ")", ":", "# Reset the cache if:", "# - the installed version is different", "# - the refresh_date is > max_refresh_date", "cached_data", "=", "{", "}", "return", "cached_data" ]
Retrieve a single deal
def retrieve ( self , id ) : _ , _ , deal = self . http_client . get ( "/deals/{id}" . format ( id = id ) ) deal [ "value" ] = Coercion . to_decimal ( deal [ "value" ] ) return deal
4,943
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L313-L328
[ "def", "wait_on_any", "(", "*", "events", ",", "*", "*", "kwargs", ")", ":", "timeout", "=", "kwargs", ".", "get", "(", "\"timeout\"", ")", "composite_event", "=", "threading", ".", "Event", "(", ")", "if", "any", "(", "[", "event", ".", "is_set", "(", ")", "for", "event", "in", "events", "]", ")", ":", "return", "def", "on_change", "(", ")", ":", "if", "any", "(", "[", "event", ".", "is_set", "(", ")", "for", "event", "in", "events", "]", ")", ":", "composite_event", ".", "set", "(", ")", "else", ":", "composite_event", ".", "clear", "(", ")", "def", "patch", "(", "original", ")", ":", "def", "patched", "(", ")", ":", "original", "(", ")", "on_change", "(", ")", "return", "patched", "for", "event", "in", "events", ":", "event", ".", "set", "=", "patch", "(", "event", ".", "set", ")", "event", ".", "clear", "=", "patch", "(", "event", ".", "clear", ")", "wait_on_event", "(", "composite_event", ",", "timeout", "=", "timeout", ")" ]
Update a deal
def update ( self , id , * args , * * kwargs ) : if not args and not kwargs : raise Exception ( 'attributes for Deal are missing' ) attributes = args [ 0 ] if args else kwargs attributes = dict ( ( k , v ) for k , v in attributes . iteritems ( ) if k in self . OPTS_KEYS_TO_PERSIST ) if "value" in attributes : attributes [ "value" ] = Coercion . to_string ( attributes [ "value" ] ) _ , _ , deal = self . http_client . put ( "/deals/{id}" . format ( id = id ) , body = attributes ) deal [ "value" ] = Coercion . to_decimal ( deal [ "value" ] ) return deal
4,944
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L330-L359
[ "def", "_load_cache", "(", "self", ")", ":", "# If the cached file exist, read-it", "max_refresh_date", "=", "timedelta", "(", "days", "=", "7", ")", "cached_data", "=", "{", "}", "try", ":", "with", "open", "(", "self", ".", "cache_file", ",", "'rb'", ")", "as", "f", ":", "cached_data", "=", "pickle", ".", "load", "(", "f", ")", "except", "Exception", "as", "e", ":", "logger", ".", "debug", "(", "\"Cannot read version from cache file: {} ({})\"", ".", "format", "(", "self", ".", "cache_file", ",", "e", ")", ")", "else", ":", "logger", ".", "debug", "(", "\"Read version from cache file\"", ")", "if", "(", "cached_data", "[", "'installed_version'", "]", "!=", "self", ".", "installed_version", "(", ")", "or", "datetime", ".", "now", "(", ")", "-", "cached_data", "[", "'refresh_date'", "]", ">", "max_refresh_date", ")", ":", "# Reset the cache if:", "# - the installed version is different", "# - the refresh_date is > max_refresh_date", "cached_data", "=", "{", "}", "return", "cached_data" ]
Update a source
def update ( self , id , * args , * * kwargs ) : if not args and not kwargs : raise Exception ( 'attributes for DealSource are missing' ) attributes = args [ 0 ] if args else kwargs attributes = dict ( ( k , v ) for k , v in attributes . iteritems ( ) if k in self . OPTS_KEYS_TO_PERSIST ) _ , _ , deal_source = self . http_client . put ( "/deal_sources/{id}" . format ( id = id ) , body = attributes ) return deal_source
4,945
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L459-L484
[ "async", "def", "_unsubscribe", "(", "self", ",", "channels", ",", "is_mask", ")", ":", "vanished", "=", "[", "]", "if", "channels", ":", "for", "channel", "in", "channels", ":", "key", "=", "channel", ",", "is_mask", "self", ".", "_channels", ".", "remove", "(", "key", ")", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", ".", "remove", "(", "self", ".", "_queue", ")", "if", "not", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", ":", "# we were last sub?", "vanished", ".", "append", "(", "channel", ")", "del", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", "else", ":", "while", "self", ".", "_channels", ":", "channel", ",", "is_mask", "=", "key", "=", "self", ".", "_channels", ".", "pop", "(", ")", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", ".", "remove", "(", "self", ".", "_queue", ")", "if", "not", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", ":", "vanished", ".", "append", "(", "channel", ")", "del", "self", ".", "_plugin", ".", "_subscriptions", "[", "key", "]", "if", "vanished", ":", "await", "getattr", "(", "self", ".", "_sub", ",", "'punsubscribe'", "if", "is_mask", "else", "'unsubscribe'", ")", "(", "vanished", ")" ]
Retrieve all deal unqualified reasons
def list ( self , * * params ) : _ , _ , deal_unqualified_reasons = self . http_client . get ( "/deal_unqualified_reasons" , params = params ) return deal_unqualified_reasons
4,946
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L528-L541
[ "def", "slice", "(", "self", ",", "rng", ",", "num_of_slices", "=", "None", ",", "slice_pos", "=", "None", ",", "slice_start", "=", "None", ",", "slice_end", "=", "None", ",", "cache_dir", "=", "None", ")", ":", "if", "num_of_slices", "is", "not", "None", "and", "slice_pos", "is", "not", "None", "and", "slice_start", "is", "None", "and", "slice_end", "is", "None", ":", "size", "=", "self", ".", "_size", "//", "num_of_slices", "amount", "=", "self", ".", "_size", "%", "num_of_slices", "slice_start", "=", "slice_pos", "*", "size", "if", "slice_pos", "<", "amount", ":", "slice_start", "+=", "slice_pos", "else", ":", "slice_start", "+=", "amount", "slice_end", "=", "slice_start", "+", "size", "if", "slice_end", ">", "self", ".", "_size", ":", "slice_start", "-=", "(", "slice_end", "-", "self", ".", "_size", ")", "slice_end", "=", "self", ".", "_size", "elif", "num_of_slices", "is", "None", "and", "slice_pos", "is", "None", "and", "slice_start", "is", "not", "None", "and", "slice_end", "is", "not", "None", ":", "pass", "else", ":", "logger", ".", "critical", "(", "'You must specify position(num_of_slice and slice_pos) or range(slice_start and slice_end).'", ")", "return", "None", "if", "cache_dir", "is", "None", ":", "ds", "=", "self", ".", "_data_source", "while", "'_data_source'", "in", "dir", "(", "ds", ")", ":", "if", "'_cache_dir'", "in", "dir", "(", "ds", ")", ":", "cache_dir", "=", "ds", ".", "_cache_dir", "ds", "=", "ds", ".", "_data_source", "if", "cache_dir", "is", "None", ":", "return", "DataIterator", "(", "DataSourceWithMemoryCache", "(", "SlicedDataSource", "(", "self", ".", "_data_source", ",", "self", ".", "_data_source", ".", "shuffle", ",", "slice_start", "=", "slice_start", ",", "slice_end", "=", "slice_end", ")", ",", "shuffle", "=", "self", ".", "_shuffle", ",", "rng", "=", "rng", ")", ",", "self", ".", "_batch_size", ")", "else", ":", "return", "DataIterator", "(", "DataSourceWithMemoryCache", "(", "DataSourceWithFileCache", "(", 
"SlicedDataSource", "(", "self", ".", "_data_source", ",", "self", ".", "_data_source", ".", "shuffle", ",", "slice_start", "=", "slice_start", ",", "slice_end", "=", "slice_end", ")", ",", "cache_dir", "=", "cache_dir", ",", "cache_file_name_prefix", "=", "'cache_sliced_{:08d}_{:08d}'", ".", "format", "(", "slice_start", ",", "slice_end", ")", ",", "shuffle", "=", "self", ".", "_shuffle", ",", "rng", "=", "rng", ")", ",", "shuffle", "=", "self", ".", "_shuffle", ",", "rng", "=", "rng", ")", ",", "self", ".", "_batch_size", ")" ]
Retrieve a single deal unqualified reason
def retrieve ( self , id ) : _ , _ , deal_unqualified_reason = self . http_client . get ( "/deal_unqualified_reasons/{id}" . format ( id = id ) ) return deal_unqualified_reason
4,947
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L568-L582
[ "def", "generate_sample_cdk_py_module", "(", "env_root", ",", "module_dir", "=", "None", ")", ":", "if", "module_dir", "is", "None", ":", "module_dir", "=", "os", ".", "path", ".", "join", "(", "env_root", ",", "'sampleapp.cdk'", ")", "generate_sample_module", "(", "module_dir", ")", "for", "i", "in", "[", "'app.py'", ",", "'cdk.json'", ",", "'lambda-index.py'", ",", "'package.json'", ",", "'runway.module.yml'", ",", "'Pipfile'", "]", ":", "shutil", ".", "copyfile", "(", "os", ".", "path", ".", "join", "(", "ROOT", ",", "'templates'", ",", "'cdk-py'", ",", "i", ")", ",", "os", ".", "path", ".", "join", "(", "module_dir", ",", "i", ")", ",", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "module_dir", ",", "'.gitignore'", ")", ",", "'w'", ")", "as", "stream", ":", "stream", ".", "write", "(", "'node_modules'", ")", "LOGGER", ".", "info", "(", "\"Sample CDK module created at %s\"", ",", "module_dir", ")", "LOGGER", ".", "info", "(", "'To finish its setup, change to the %s directory and execute '", "'\"npm install\" and \"pipenv update -d --three\" to generate its '", "'lockfiles.'", ",", "module_dir", ")" ]
Retrieve all leads
def list ( self , * * params ) : _ , _ , leads = self . http_client . get ( "/leads" , params = params ) return leads
4,948
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L653-L666
[ "def", "export_draco", "(", "mesh", ")", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.ply'", ")", "as", "temp_ply", ":", "temp_ply", ".", "write", "(", "export_ply", "(", "mesh", ")", ")", "temp_ply", ".", "flush", "(", ")", "with", "tempfile", ".", "NamedTemporaryFile", "(", "suffix", "=", "'.drc'", ")", "as", "encoded", ":", "subprocess", ".", "check_output", "(", "[", "draco_encoder", ",", "'-qp'", ",", "# bits of quantization for position", "'28'", ",", "# since our tol.merge is 1e-8, 25 bits", "# more has a machine epsilon", "# smaller than that", "'-i'", ",", "temp_ply", ".", "name", ",", "'-o'", ",", "encoded", ".", "name", "]", ")", "encoded", ".", "seek", "(", "0", ")", "data", "=", "encoded", ".", "read", "(", ")", "return", "data" ]
Retrieve a single lead
def retrieve ( self , id ) : _ , _ , lead = self . http_client . get ( "/leads/{id}" . format ( id = id ) ) return lead
4,949
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L691-L705
[ "def", "enum", "(", "option", ",", "*", "options", ")", ":", "options", "=", "(", "option", ",", ")", "+", "options", "rangeob", "=", "range", "(", "len", "(", "options", ")", ")", "try", ":", "inttype", "=", "_inttypes", "[", "int", "(", "np", ".", "log2", "(", "len", "(", "options", ")", "-", "1", ")", ")", "//", "8", "]", "except", "IndexError", ":", "raise", "OverflowError", "(", "'Cannot store enums with more than sys.maxsize elements, got %d'", "%", "len", "(", "options", ")", ",", ")", "class", "_enum", "(", "Structure", ")", ":", "_fields_", "=", "[", "(", "o", ",", "inttype", ")", "for", "o", "in", "options", "]", "def", "__iter__", "(", "self", ")", ":", "return", "iter", "(", "rangeob", ")", "def", "__contains__", "(", "self", ",", "value", ")", ":", "return", "0", "<=", "value", "<", "len", "(", "options", ")", "def", "__repr__", "(", "self", ")", ":", "return", "'<enum: %s>'", "%", "(", "(", "'%d fields'", "%", "len", "(", "options", ")", ")", "if", "len", "(", "options", ")", ">", "10", "else", "repr", "(", "options", ")", ")", "return", "_enum", "(", "*", "rangeob", ")" ]
Retrieve all lead unqualified reasons
def list ( self , * * params ) : _ , _ , lead_unqualified_reasons = self . http_client . get ( "/lead_unqualified_reasons" , params = params ) return lead_unqualified_reasons
4,950
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L898-L911
[ "def", "random_date", "(", ")", ":", "d", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "date", "(", ")", "d", "=", "d", "-", "datetime", ".", "timedelta", "(", "random", ".", "randint", "(", "20", ",", "2001", ")", ")", "return", "d" ]
Retrieve order s line items
def list ( self , order_id , * * params ) : _ , _ , line_items = self . http_client . get ( "/orders/{order_id}/line_items" . format ( order_id = order_id ) , params = params ) return line_items
4,951
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L938-L952
[ "def", "buildDiscover", "(", "base_url", ",", "out_dir", ")", ":", "test_data", "=", "discoverdata", ".", "readTests", "(", "discoverdata", ".", "default_test_file", ")", "def", "writeTestFile", "(", "test_name", ")", ":", "template", "=", "test_data", "[", "test_name", "]", "data", "=", "discoverdata", ".", "fillTemplate", "(", "test_name", ",", "template", ",", "base_url", ",", "discoverdata", ".", "example_xrds", ")", "out_file_name", "=", "os", ".", "path", ".", "join", "(", "out_dir", ",", "test_name", ")", "out_file", "=", "file", "(", "out_file_name", ",", "'w'", ")", "out_file", ".", "write", "(", "data", ")", "manifest", "=", "[", "manifest_header", "]", "for", "success", ",", "input_name", ",", "id_name", ",", "result_name", "in", "discoverdata", ".", "testlist", ":", "if", "not", "success", ":", "continue", "writeTestFile", "(", "input_name", ")", "input_url", "=", "urlparse", ".", "urljoin", "(", "base_url", ",", "input_name", ")", "id_url", "=", "urlparse", ".", "urljoin", "(", "base_url", ",", "id_name", ")", "result_url", "=", "urlparse", ".", "urljoin", "(", "base_url", ",", "result_name", ")", "manifest", ".", "append", "(", "'\\t'", ".", "join", "(", "(", "input_url", ",", "id_url", ",", "result_url", ")", ")", ")", "manifest", ".", "append", "(", "'\\n'", ")", "manifest_file_name", "=", "os", ".", "path", ".", "join", "(", "out_dir", ",", "'manifest.txt'", ")", "manifest_file", "=", "file", "(", "manifest_file_name", ",", "'w'", ")", "for", "chunk", "in", "manifest", ":", "manifest_file", ".", "write", "(", "chunk", ")", "manifest_file", ".", "close", "(", ")" ]
Retrieve a single line item
def retrieve ( self , order_id , id ) : _ , _ , line_item = self . http_client . get ( "/orders/{order_id}/line_items/{id}" . format ( order_id = order_id , id = id ) ) return line_item
4,952
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L979-L993
[ "def", "shape", "(", "self", ")", "->", "Tuple", "[", "int", ",", "...", "]", ":", "nmb_place", "=", "len", "(", "self", ".", "sequences", ")", "nmb_time", "=", "len", "(", "hydpy", ".", "pub", ".", "timegrids", ".", "init", ")", "nmb_others", "=", "collections", ".", "deque", "(", ")", "for", "sequence", "in", "self", ".", "sequences", ".", "values", "(", ")", ":", "nmb_others", ".", "append", "(", "sequence", ".", "shape", ")", "nmb_others_max", "=", "tuple", "(", "numpy", ".", "max", "(", "nmb_others", ",", "axis", "=", "0", ")", ")", "return", "self", ".", "sort_timeplaceentries", "(", "nmb_time", ",", "nmb_place", ")", "+", "nmb_others_max" ]
Retrieve all reasons
def list ( self , * * params ) : _ , _ , loss_reasons = self . http_client . get ( "/loss_reasons" , params = params ) return loss_reasons
4,953
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1037-L1050
[ "def", "load_plugins", "(", "self", ",", "plugin_class_name", ")", ":", "# imp.findmodule('atomic_reactor') doesn't work", "plugins_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'plugins'", ")", "logger", ".", "debug", "(", "\"loading plugins from dir '%s'\"", ",", "plugins_dir", ")", "files", "=", "[", "os", ".", "path", ".", "join", "(", "plugins_dir", ",", "f", ")", "for", "f", "in", "os", ".", "listdir", "(", "plugins_dir", ")", "if", "f", ".", "endswith", "(", "\".py\"", ")", "]", "if", "self", ".", "plugin_files", ":", "logger", ".", "debug", "(", "\"loading additional plugins from files '%s'\"", ",", "self", ".", "plugin_files", ")", "files", "+=", "self", ".", "plugin_files", "plugin_class", "=", "globals", "(", ")", "[", "plugin_class_name", "]", "plugin_classes", "=", "{", "}", "for", "f", "in", "files", ":", "module_name", "=", "os", ".", "path", ".", "basename", "(", "f", ")", ".", "rsplit", "(", "'.'", ",", "1", ")", "[", "0", "]", "# Do not reload plugins", "if", "module_name", "in", "sys", ".", "modules", ":", "f_module", "=", "sys", ".", "modules", "[", "module_name", "]", "else", ":", "try", ":", "logger", ".", "debug", "(", "\"load file '%s'\"", ",", "f", ")", "f_module", "=", "imp", ".", "load_source", "(", "module_name", ",", "f", ")", "except", "(", "IOError", ",", "OSError", ",", "ImportError", ",", "SyntaxError", ")", "as", "ex", ":", "logger", ".", "warning", "(", "\"can't load module '%s': %r\"", ",", "f", ",", "ex", ")", "continue", "for", "name", "in", "dir", "(", "f_module", ")", ":", "binding", "=", "getattr", "(", "f_module", ",", "name", ",", "None", ")", "try", ":", "# if you try to compare binding and PostBuildPlugin, python won't match them", "# if you call this script directly b/c:", "# ! 
<class 'plugins.plugin_rpmqa.PostBuildRPMqaPlugin'> <= <class", "# '__main__.PostBuildPlugin'>", "# but", "# <class 'plugins.plugin_rpmqa.PostBuildRPMqaPlugin'> <= <class", "# 'atomic_reactor.plugin.PostBuildPlugin'>", "is_sub", "=", "issubclass", "(", "binding", ",", "plugin_class", ")", "except", "TypeError", ":", "is_sub", "=", "False", "if", "binding", "and", "is_sub", "and", "plugin_class", ".", "__name__", "!=", "binding", ".", "__name__", ":", "plugin_classes", "[", "binding", ".", "key", "]", "=", "binding", "return", "plugin_classes" ]
Retrieve a single reason
def retrieve ( self , id ) : _ , _ , loss_reason = self . http_client . get ( "/loss_reasons/{id}" . format ( id = id ) ) return loss_reason
4,954
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1077-L1091
[ "def", "get_tiltplane", "(", "self", ",", "sequence", ")", ":", "sequence", "=", "sorted", "(", "sequence", ",", "key", "=", "lambda", "x", ":", "self", ".", "virtual_atoms", "[", "x", "]", ".", "z", ")", "in_plane", "=", "[", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "sequence", ")", "-", "4", ")", ":", "if", "abs", "(", "self", ".", "virtual_atoms", "[", "sequence", "[", "i", "]", "]", ".", "z", "-", "self", ".", "virtual_atoms", "[", "sequence", "[", "i", "+", "1", "]", "]", ".", "z", ")", "<", "self", ".", "OCTAHEDRON_ATOMS_Z_DIFFERENCE", "and", "abs", "(", "self", ".", "virtual_atoms", "[", "sequence", "[", "i", "+", "1", "]", "]", ".", "z", "-", "self", ".", "virtual_atoms", "[", "sequence", "[", "i", "+", "2", "]", "]", ".", "z", ")", "<", "self", ".", "OCTAHEDRON_ATOMS_Z_DIFFERENCE", "and", "abs", "(", "self", ".", "virtual_atoms", "[", "sequence", "[", "i", "+", "2", "]", "]", ".", "z", "-", "self", ".", "virtual_atoms", "[", "sequence", "[", "i", "+", "3", "]", "]", ".", "z", ")", "<", "self", ".", "OCTAHEDRON_ATOMS_Z_DIFFERENCE", ":", "in_plane", "=", "[", "sequence", "[", "j", "]", "for", "j", "in", "range", "(", "i", ",", "i", "+", "4", ")", "]", "return", "in_plane" ]
Retrieve all notes
def list ( self , * * params ) : _ , _ , notes = self . http_client . get ( "/notes" , params = params ) return notes
4,955
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1162-L1175
[ "def", "process_request", "(", "self", ",", "request", ")", ":", "# User has already been authed by alternate middleware", "if", "hasattr", "(", "request", ",", "\"facebook\"", ")", "and", "request", ".", "facebook", ":", "return", "request", ".", "facebook", "=", "False", "if", "not", "self", ".", "is_valid_path", "(", "request", ")", ":", "return", "if", "self", ".", "is_access_denied", "(", "request", ")", ":", "return", "authorization_denied_view", "(", "request", ")", "request", ".", "facebook", "=", "Facebook", "(", ")", "oauth_token", "=", "False", "# Is there a token cookie already present?", "if", "'oauth_token'", "in", "request", ".", "COOKIES", ":", "try", ":", "# Check if the current token is already in DB", "oauth_token", "=", "OAuthToken", ".", "objects", ".", "get", "(", "token", "=", "request", ".", "COOKIES", "[", "'oauth_token'", "]", ")", "except", "OAuthToken", ".", "DoesNotExist", ":", "request", ".", "facebook", "=", "False", "return", "# Is there a code in the GET request?", "elif", "'code'", "in", "request", ".", "GET", ":", "try", ":", "graph", "=", "GraphAPI", "(", ")", "# Exchange code for an access_token", "response", "=", "graph", ".", "get", "(", "'oauth/access_token'", ",", "client_id", "=", "FACEBOOK_APPLICATION_ID", ",", "redirect_uri", "=", "get_post_authorization_redirect_url", "(", "request", ",", "canvas", "=", "False", ")", ",", "client_secret", "=", "FACEBOOK_APPLICATION_SECRET_KEY", ",", "code", "=", "request", ".", "GET", "[", "'code'", "]", ",", ")", "components", "=", "parse_qs", "(", "response", ")", "# Save new OAuth-token in DB", "oauth_token", ",", "new_oauth_token", "=", "OAuthToken", ".", "objects", ".", "get_or_create", "(", "token", "=", "components", "[", "'access_token'", "]", "[", "0", "]", ",", "issued_at", "=", "now", "(", ")", ",", "expires_at", "=", "now", "(", ")", "+", "timedelta", "(", "seconds", "=", "int", "(", "components", "[", "'expires'", "]", "[", "0", "]", ")", ")", ")", "except", "GraphAPI", 
".", "OAuthError", ":", "pass", "# There isn't a valid access_token", "if", "not", "oauth_token", "or", "oauth_token", ".", "expired", ":", "request", ".", "facebook", "=", "False", "return", "# Is there a user already connected to the current token?", "try", ":", "user", "=", "oauth_token", ".", "user", "if", "not", "user", ".", "authorized", ":", "request", ".", "facebook", "=", "False", "return", "user", ".", "last_seen_at", "=", "now", "(", ")", "user", ".", "save", "(", ")", "except", "User", ".", "DoesNotExist", ":", "graph", "=", "GraphAPI", "(", "oauth_token", ".", "token", ")", "profile", "=", "graph", ".", "get", "(", "'me'", ")", "# Either the user already exists and its just a new token, or user and token both are new", "try", ":", "user", "=", "User", ".", "objects", ".", "get", "(", "facebook_id", "=", "profile", ".", "get", "(", "'id'", ")", ")", "if", "not", "user", ".", "authorized", ":", "if", "new_oauth_token", ":", "user", ".", "last_seen_at", "=", "now", "(", ")", "user", ".", "authorized", "=", "True", "else", ":", "request", ".", "facebook", "=", "False", "return", "except", "User", ".", "DoesNotExist", ":", "# Create a new user to go with token", "user", "=", "User", ".", "objects", ".", "create", "(", "facebook_id", "=", "profile", ".", "get", "(", "'id'", ")", ",", "oauth_token", "=", "oauth_token", ")", "user", ".", "synchronize", "(", "profile", ")", "# Delete old access token if there is any and only if the new one is different", "old_oauth_token", "=", "None", "if", "user", ".", "oauth_token", "!=", "oauth_token", ":", "old_oauth_token", "=", "user", ".", "oauth_token", "user", ".", "oauth_token", "=", "oauth_token", "user", ".", "save", "(", ")", "if", "old_oauth_token", ":", "old_oauth_token", ".", "delete", "(", ")", "if", "not", "user", ".", "oauth_token", ".", "extended", ":", "# Attempt to extend the OAuth token, but ignore exceptions raised by", "# bug #102727766518358 in the Facebook Platform.", "#", "# 
http://developers.facebook.com/bugs/102727766518358/", "try", ":", "user", ".", "oauth_token", ".", "extend", "(", ")", "except", ":", "pass", "request", ".", "facebook", ".", "user", "=", "user", "request", ".", "facebook", ".", "oauth_token", "=", "oauth_token" ]
Retrieve a single note
def retrieve ( self , id ) : _ , _ , note = self . http_client . get ( "/notes/{id}" . format ( id = id ) ) return note
4,956
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1202-L1216
[ "def", "gen_urls", "(", "self", ",", "text", ")", ":", "tld_pos", "=", "0", "matched_tlds", "=", "self", ".", "_tlds_re", ".", "findall", "(", "text", ")", "for", "tld", "in", "matched_tlds", ":", "tmp_text", "=", "text", "[", "tld_pos", ":", "]", "offset", "=", "tld_pos", "tld_pos", "=", "tmp_text", ".", "find", "(", "tld", ")", "validated", "=", "self", ".", "_validate_tld_match", "(", "text", ",", "tld", ",", "offset", "+", "tld_pos", ")", "if", "tld_pos", "!=", "-", "1", "and", "validated", ":", "tmp_url", "=", "self", ".", "_complete_url", "(", "text", ",", "offset", "+", "tld_pos", ",", "tld", ")", "if", "tmp_url", ":", "yield", "tmp_url", "# do not search for TLD in already extracted URL", "tld_pos_url", "=", "tmp_url", ".", "find", "(", "tld", ")", "# move cursor right after found TLD", "tld_pos", "+=", "len", "(", "tld", ")", "+", "offset", "# move cursor after end of found URL", "tld_pos", "+=", "len", "(", "tmp_url", "[", "tld_pos_url", "+", "len", "(", "tld", ")", ":", "]", ")", "continue", "# move cursor right after found TLD", "tld_pos", "+=", "len", "(", "tld", ")", "+", "offset" ]
Retrieve all orders
def list ( self , * * params ) : _ , _ , orders = self . http_client . get ( "/orders" , params = params ) return orders
4,957
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1284-L1297
[ "def", "creds", "(", "provider", ")", ":", "# Declare globals", "global", "__AccessKeyId__", ",", "__SecretAccessKey__", ",", "__Token__", ",", "__Expiration__", "ret_credentials", "=", "(", ")", "# if id or key is 'use-instance-role-credentials', pull them from meta-data", "## if needed", "if", "provider", "[", "'id'", "]", "==", "IROLE_CODE", "or", "provider", "[", "'key'", "]", "==", "IROLE_CODE", ":", "# Check to see if we have cache credentials that are still good", "if", "__Expiration__", "!=", "''", ":", "timenow", "=", "datetime", ".", "utcnow", "(", ")", "timestamp", "=", "timenow", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%SZ'", ")", "if", "timestamp", "<", "__Expiration__", ":", "# Current timestamp less than expiration fo cached credentials", "return", "__AccessKeyId__", ",", "__SecretAccessKey__", ",", "__Token__", "# We don't have any cached credentials, or they are expired, get them", "# Connections to instance meta-data must fail fast and never be proxied", "try", ":", "result", "=", "requests", ".", "get", "(", "\"http://169.254.169.254/latest/meta-data/iam/security-credentials/\"", ",", "proxies", "=", "{", "'http'", ":", "''", "}", ",", "timeout", "=", "AWS_METADATA_TIMEOUT", ",", ")", "result", ".", "raise_for_status", "(", ")", "role", "=", "result", ".", "text", "except", "(", "requests", ".", "exceptions", ".", "HTTPError", ",", "requests", ".", "exceptions", ".", "ConnectionError", ")", ":", "return", "provider", "[", "'id'", "]", ",", "provider", "[", "'key'", "]", ",", "''", "try", ":", "result", "=", "requests", ".", "get", "(", "\"http://169.254.169.254/latest/meta-data/iam/security-credentials/{0}\"", ".", "format", "(", "role", ")", ",", "proxies", "=", "{", "'http'", ":", "''", "}", ",", "timeout", "=", "AWS_METADATA_TIMEOUT", ",", ")", "result", ".", "raise_for_status", "(", ")", "except", "(", "requests", ".", "exceptions", ".", "HTTPError", ",", "requests", ".", "exceptions", ".", "ConnectionError", ")", ":", "return", "provider", 
"[", "'id'", "]", ",", "provider", "[", "'key'", "]", ",", "''", "data", "=", "result", ".", "json", "(", ")", "__AccessKeyId__", "=", "data", "[", "'AccessKeyId'", "]", "__SecretAccessKey__", "=", "data", "[", "'SecretAccessKey'", "]", "__Token__", "=", "data", "[", "'Token'", "]", "__Expiration__", "=", "data", "[", "'Expiration'", "]", "ret_credentials", "=", "__AccessKeyId__", ",", "__SecretAccessKey__", ",", "__Token__", "else", ":", "ret_credentials", "=", "provider", "[", "'id'", "]", ",", "provider", "[", "'key'", "]", ",", "''", "if", "provider", ".", "get", "(", "'role_arn'", ")", "is", "not", "None", ":", "provider_shadow", "=", "provider", ".", "copy", "(", ")", "provider_shadow", ".", "pop", "(", "\"role_arn\"", ",", "None", ")", "log", ".", "info", "(", "\"Assuming the role: %s\"", ",", "provider", ".", "get", "(", "'role_arn'", ")", ")", "ret_credentials", "=", "assumed_creds", "(", "provider_shadow", ",", "role_arn", "=", "provider", ".", "get", "(", "'role_arn'", ")", ",", "location", "=", "'us-east-1'", ")", "return", "ret_credentials" ]
Retrieve a single order
def retrieve ( self , id ) : _ , _ , order = self . http_client . get ( "/orders/{id}" . format ( id = id ) ) return order
4,958
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1323-L1337
[ "def", "get_stats_display_width", "(", "self", ",", "curse_msg", ",", "without_option", "=", "False", ")", ":", "try", ":", "if", "without_option", ":", "# Size without options", "c", "=", "len", "(", "max", "(", "''", ".", "join", "(", "[", "(", "u", "(", "u", "(", "nativestr", "(", "i", "[", "'msg'", "]", ")", ")", ".", "encode", "(", "'ascii'", ",", "'replace'", ")", ")", "if", "not", "i", "[", "'optional'", "]", "else", "\"\"", ")", "for", "i", "in", "curse_msg", "[", "'msgdict'", "]", "]", ")", ".", "split", "(", "'\\n'", ")", ",", "key", "=", "len", ")", ")", "else", ":", "# Size with all options", "c", "=", "len", "(", "max", "(", "''", ".", "join", "(", "[", "u", "(", "u", "(", "nativestr", "(", "i", "[", "'msg'", "]", ")", ")", ".", "encode", "(", "'ascii'", ",", "'replace'", ")", ")", "for", "i", "in", "curse_msg", "[", "'msgdict'", "]", "]", ")", ".", "split", "(", "'\\n'", ")", ",", "key", "=", "len", ")", ")", "except", "Exception", "as", "e", ":", "logger", ".", "debug", "(", "'ERROR: Can not compute plugin width ({})'", ".", "format", "(", "e", ")", ")", "return", "0", "else", ":", "return", "c" ]
Retrieve all pipelines
def list ( self , * * params ) : _ , _ , pipelines = self . http_client . get ( "/pipelines" , params = params ) return pipelines
4,959
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1401-L1414
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Retrieve all products
def list ( self , * * params ) : _ , _ , products = self . http_client . get ( "/products" , params = params ) return products
4,960
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1441-L1454
[ "def", "dump", "(", "self", ")", ":", "assert", "self", ".", "database", "is", "not", "None", "cmd", "=", "\"SELECT count from {} WHERE rowid={}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "STATE_INFO_ROW", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "self", ".", "_from_sqlite", "(", "ret", "[", "0", "]", "[", "0", "]", ")", "+", "self", ".", "inserts", "if", "count", ">", "self", ".", "row_limit", ":", "msg", "=", "\"cleaning up state, this might take a while.\"", "logger", ".", "warning", "(", "msg", ")", "delete", "=", "count", "-", "self", ".", "row_limit", "delete", "+=", "int", "(", "self", ".", "row_limit", "*", "(", "self", ".", "row_cleanup_quota", "/", "100.0", ")", ")", "cmd", "=", "(", "\"DELETE FROM {} WHERE timestamp IN (\"", "\"SELECT timestamp FROM {} ORDER BY timestamp ASC LIMIT {});\"", ")", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ",", "self", ".", "STATE_TABLE", ",", "delete", ")", ")", "self", ".", "_vacuum", "(", ")", "cmd", "=", "\"SELECT COUNT(*) FROM {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "ret", "[", "0", "]", "[", "0", "]", "cmd", "=", "\"UPDATE {} SET count = {} WHERE rowid = {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "_to_sqlite", "(", "count", ")", ",", "self", ".", "STATE_INFO_ROW", ",", ")", ")", "self", ".", "_update_cache_directory_state", "(", ")", "self", ".", "database", ".", "commit", "(", ")", "self", ".", "cursor", ".", "close", "(", ")", "self", ".", "database", ".", "close", "(", ")", 
"self", ".", "database", "=", "None", "self", ".", "cursor", "=", "None", "self", ".", "inserts", "=", "0" ]
Retrieve a single product
def retrieve ( self , id ) : _ , _ , product = self . http_client . get ( "/products/{id}" . format ( id = id ) ) return product
4,961
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1478-L1492
[ "def", "load_toml_rest_api_config", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "filename", ")", ":", "LOGGER", ".", "info", "(", "\"Skipping rest api loading from non-existent config file: %s\"", ",", "filename", ")", "return", "RestApiConfig", "(", ")", "LOGGER", ".", "info", "(", "\"Loading rest api information from config: %s\"", ",", "filename", ")", "try", ":", "with", "open", "(", "filename", ")", "as", "fd", ":", "raw_config", "=", "fd", ".", "read", "(", ")", "except", "IOError", "as", "e", ":", "raise", "RestApiConfigurationError", "(", "\"Unable to load rest api configuration file: {}\"", ".", "format", "(", "str", "(", "e", ")", ")", ")", "toml_config", "=", "toml", ".", "loads", "(", "raw_config", ")", "invalid_keys", "=", "set", "(", "toml_config", ".", "keys", "(", ")", ")", ".", "difference", "(", "[", "'bind'", ",", "'connect'", ",", "'timeout'", ",", "'opentsdb_db'", ",", "'opentsdb_url'", ",", "'opentsdb_username'", ",", "'opentsdb_password'", ",", "'client_max_size'", "]", ")", "if", "invalid_keys", ":", "raise", "RestApiConfigurationError", "(", "\"Invalid keys in rest api config: {}\"", ".", "format", "(", "\", \"", ".", "join", "(", "sorted", "(", "list", "(", "invalid_keys", ")", ")", ")", ")", ")", "config", "=", "RestApiConfig", "(", "bind", "=", "toml_config", ".", "get", "(", "\"bind\"", ",", "None", ")", ",", "connect", "=", "toml_config", ".", "get", "(", "'connect'", ",", "None", ")", ",", "timeout", "=", "toml_config", ".", "get", "(", "'timeout'", ",", "None", ")", ",", "opentsdb_url", "=", "toml_config", ".", "get", "(", "'opentsdb_url'", ",", "None", ")", ",", "opentsdb_db", "=", "toml_config", ".", "get", "(", "'opentsdb_db'", ",", "None", ")", ",", "opentsdb_username", "=", "toml_config", ".", "get", "(", "'opentsdb_username'", ",", "None", ")", ",", "opentsdb_password", "=", "toml_config", ".", "get", "(", "'opentsdb_password'", ",", "None", ")", ",", "client_max_size", "=", "toml_config", 
".", "get", "(", "'client_max_size'", ",", "None", ")", ")", "return", "config" ]
Retrieve all stages
def list ( self , * * params ) : _ , _ , stages = self . http_client . get ( "/stages" , params = params ) return stages
4,962
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1686-L1699
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Retrieve all tags
def list ( self , * * params ) : _ , _ , tags = self . http_client . get ( "/tags" , params = params ) return tags
4,963
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1726-L1739
[ "def", "_timestamp_regulator", "(", "self", ")", ":", "unified_timestamps", "=", "_PrettyDefaultDict", "(", "list", ")", "staged_files", "=", "self", ".", "_list_audio_files", "(", "sub_dir", "=", "\"staging\"", ")", "for", "timestamp_basename", "in", "self", ".", "__timestamps_unregulated", ":", "if", "len", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ">", "1", ":", "# File has been splitted", "timestamp_name", "=", "''", ".", "join", "(", "timestamp_basename", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "staged_splitted_files_of_timestamp", "=", "list", "(", "filter", "(", "lambda", "staged_file", ":", "(", "timestamp_name", "==", "staged_file", "[", ":", "-", "3", "]", "and", "all", "(", "[", "(", "x", "in", "set", "(", "map", "(", "str", ",", "range", "(", "10", ")", ")", ")", ")", "for", "x", "in", "staged_file", "[", "-", "3", ":", "]", "]", ")", ")", ",", "staged_files", ")", ")", "if", "len", "(", "staged_splitted_files_of_timestamp", ")", "==", "0", ":", "self", ".", "__errors", "[", "(", "time", "(", ")", ",", "timestamp_basename", ")", "]", "=", "{", "\"reason\"", ":", "\"Missing staged file\"", ",", "\"current_staged_files\"", ":", "staged_files", "}", "continue", "staged_splitted_files_of_timestamp", ".", "sort", "(", ")", "unified_timestamp", "=", "list", "(", ")", "for", "staging_digits", ",", "splitted_file", "in", "enumerate", "(", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", ")", ":", "prev_splits_sec", "=", "0", "if", "int", "(", "staging_digits", ")", "!=", "0", ":", "prev_splits_sec", "=", "self", ".", "_get_audio_duration_seconds", "(", "\"{}/staging/{}{:03d}\"", ".", "format", "(", "self", ".", "src_dir", ",", "timestamp_name", ",", "staging_digits", "-", "1", ")", ")", "for", "word_block", "in", "splitted_file", ":", "unified_timestamp", ".", "append", "(", "_WordBlock", "(", "word", "=", "word_block", ".", "word", ",", "start", "=", "round", "(", 
"word_block", ".", "start", "+", "prev_splits_sec", ",", "2", ")", ",", "end", "=", "round", "(", "word_block", ".", "end", "+", "prev_splits_sec", ",", "2", ")", ")", ")", "unified_timestamps", "[", "str", "(", "timestamp_basename", ")", "]", "+=", "unified_timestamp", "else", ":", "unified_timestamps", "[", "timestamp_basename", "]", "+=", "self", ".", "__timestamps_unregulated", "[", "timestamp_basename", "]", "[", "0", "]", "self", ".", "__timestamps", ".", "update", "(", "unified_timestamps", ")", "self", ".", "__timestamps_unregulated", "=", "_PrettyDefaultDict", "(", "list", ")" ]
Retrieve a single tag
def retrieve ( self , id ) : _ , _ , tag = self . http_client . get ( "/tags/{id}" . format ( id = id ) ) return tag
4,964
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1764-L1778
[ "def", "do_gc", "(", "self", ",", "args", ")", ":", "### humm...", "instance_type", "=", "getattr", "(", "types", ",", "'InstanceType'", ",", "object", ")", "# snapshot of counts", "type2count", "=", "{", "}", "type2all", "=", "{", "}", "for", "o", "in", "gc", ".", "get_objects", "(", ")", ":", "if", "type", "(", "o", ")", "==", "instance_type", ":", "type2count", "[", "o", ".", "__class__", "]", "=", "type2count", ".", "get", "(", "o", ".", "__class__", ",", "0", ")", "+", "1", "type2all", "[", "o", ".", "__class__", "]", "=", "type2all", ".", "get", "(", "o", ".", "__class__", ",", "0", ")", "+", "sys", ".", "getrefcount", "(", "o", ")", "# count the things that have changed", "ct", "=", "[", "(", "t", ".", "__module__", ",", "t", ".", "__name__", ",", "type2count", "[", "t", "]", ",", "type2count", "[", "t", "]", "-", "self", ".", "type2count", ".", "get", "(", "t", ",", "0", ")", ",", "type2all", "[", "t", "]", "-", "self", ".", "type2all", ".", "get", "(", "t", ",", "0", ")", ")", "for", "t", "in", "type2count", ".", "iterkeys", "(", ")", "]", "# ready for the next time", "self", ".", "type2count", "=", "type2count", "self", ".", "type2all", "=", "type2all", "fmt", "=", "\"%-30s %-30s %6s %6s %6s\\n\"", "self", ".", "stdout", ".", "write", "(", "fmt", "%", "(", "\"Module\"", ",", "\"Type\"", ",", "\"Count\"", ",", "\"dCount\"", ",", "\"dRef\"", ")", ")", "# sorted by count", "ct", ".", "sort", "(", "lambda", "x", ",", "y", ":", "cmp", "(", "y", "[", "2", "]", ",", "x", "[", "2", "]", ")", ")", "for", "i", "in", "range", "(", "min", "(", "10", ",", "len", "(", "ct", ")", ")", ")", ":", "m", ",", "n", ",", "c", ",", "delta1", ",", "delta2", "=", "ct", "[", "i", "]", "self", ".", "stdout", ".", "write", "(", "fmt", "%", "(", "m", ",", "n", ",", "c", ",", "delta1", ",", "delta2", ")", ")", "self", ".", "stdout", ".", "write", "(", "\"\\n\"", ")", "self", ".", "stdout", ".", "write", "(", "fmt", "%", "(", "\"Module\"", ",", "\"Type\"", ",", "\"Count\"", ",", 
"\"dCount\"", ",", "\"dRef\"", ")", ")", "# sorted by module and class", "ct", ".", "sort", "(", ")", "for", "m", ",", "n", ",", "c", ",", "delta1", ",", "delta2", "in", "ct", ":", "if", "delta1", "or", "delta2", ":", "self", ".", "stdout", ".", "write", "(", "fmt", "%", "(", "m", ",", "n", ",", "c", ",", "delta1", ",", "delta2", ")", ")", "self", ".", "stdout", ".", "write", "(", "\"\\n\"", ")" ]
Retrieve all tasks
def list ( self , * * params ) : _ , _ , tasks = self . http_client . get ( "/tasks" , params = params ) return tasks
4,965
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1848-L1863
[ "def", "connection", "(", "cls", ")", ":", "local", "=", "cls", ".", "_threadlocal", "if", "not", "getattr", "(", "local", ",", "'connection'", ",", "None", ")", ":", "# Make sure these variables are no longer affected by other threads.", "local", ".", "user", "=", "cls", ".", "user", "local", ".", "password", "=", "cls", ".", "password", "local", ".", "site", "=", "cls", ".", "site", "local", ".", "timeout", "=", "cls", ".", "timeout", "local", ".", "headers", "=", "cls", ".", "headers", "local", ".", "format", "=", "cls", ".", "format", "local", ".", "version", "=", "cls", ".", "version", "local", ".", "url", "=", "cls", ".", "url", "if", "cls", ".", "site", "is", "None", ":", "raise", "ValueError", "(", "\"No shopify session is active\"", ")", "local", ".", "connection", "=", "ShopifyConnection", "(", "cls", ".", "site", ",", "cls", ".", "user", ",", "cls", ".", "password", ",", "cls", ".", "timeout", ",", "cls", ".", "format", ")", "return", "local", ".", "connection" ]
Retrieve a single task
def retrieve ( self , id ) : _ , _ , task = self . http_client . get ( "/tasks/{id}" . format ( id = id ) ) return task
4,966
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1891-L1905
[ "def", "rate_limit", "(", "f", ")", ":", "def", "new_f", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "errors", "=", "0", "while", "True", ":", "resp", "=", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "resp", ".", "status_code", "==", "200", ":", "errors", "=", "0", "return", "resp", "elif", "resp", ".", "status_code", "==", "401", ":", "# Hack to retain the original exception, but augment it with", "# additional context for the user to interpret it. In a Python", "# 3 only future we can raise a new exception of the same type", "# with a new message from the old error.", "try", ":", "resp", ".", "raise_for_status", "(", ")", "except", "requests", ".", "HTTPError", "as", "e", ":", "message", "=", "\"\\nThis is a protected or locked account, or\"", "+", "\" the credentials provided are no longer valid.\"", "e", ".", "args", "=", "(", "e", ".", "args", "[", "0", "]", "+", "message", ",", ")", "+", "e", ".", "args", "[", "1", ":", "]", "log", ".", "warning", "(", "\"401 Authentication required for %s\"", ",", "resp", ".", "url", ")", "raise", "elif", "resp", ".", "status_code", "==", "429", ":", "reset", "=", "int", "(", "resp", ".", "headers", "[", "'x-rate-limit-reset'", "]", ")", "now", "=", "time", ".", "time", "(", ")", "seconds", "=", "reset", "-", "now", "+", "10", "if", "seconds", "<", "1", ":", "seconds", "=", "10", "log", ".", "warning", "(", "\"rate limit exceeded: sleeping %s secs\"", ",", "seconds", ")", "time", ".", "sleep", "(", "seconds", ")", "elif", "resp", ".", "status_code", ">=", "500", ":", "errors", "+=", "1", "if", "errors", ">", "30", ":", "log", ".", "warning", "(", "\"too many errors from Twitter, giving up\"", ")", "resp", ".", "raise_for_status", "(", ")", "seconds", "=", "60", "*", "errors", "log", ".", "warning", "(", "\"%s from Twitter API, sleeping %s\"", ",", "resp", ".", "status_code", ",", "seconds", ")", "time", ".", "sleep", "(", "seconds", ")", "else", ":", "resp", ".", "raise_for_status", "(", ")", 
"return", "new_f" ]
Retrieve text messages
def list ( self , * * params ) : _ , _ , text_messages = self . http_client . get ( "/text_messages" , params = params ) return text_messages
4,967
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1969-L1982
[ "def", "returnJobReqs", "(", "self", ",", "jobReqs", ")", ":", "# Since we are only reading this job's specific values from the state file, we don't", "# need a lock", "jobState", "=", "self", ".", "_JobState", "(", "self", ".", "_CacheState", ".", "_load", "(", "self", ".", "cacheStateFile", ")", ".", "jobState", "[", "self", ".", "jobID", "]", ")", "for", "x", "in", "list", "(", "jobState", ".", "jobSpecificFiles", ".", "keys", "(", ")", ")", ":", "self", ".", "deleteLocalFile", "(", "x", ")", "with", "self", ".", "_CacheState", ".", "open", "(", "self", ")", "as", "cacheInfo", ":", "cacheInfo", ".", "sigmaJob", "-=", "jobReqs" ]
Retrieve a single text message
def retrieve ( self , id ) : _ , _ , text_message = self . http_client . get ( "/text_messages/{id}" . format ( id = id ) ) return text_message
4,968
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L1984-L1998
[ "def", "get_and_project_events", "(", "self", ",", "entity_id", ",", "gt", "=", "None", ",", "gte", "=", "None", ",", "lt", "=", "None", ",", "lte", "=", "None", ",", "limit", "=", "None", ",", "initial_state", "=", "None", ",", "query_descending", "=", "False", ")", ":", "# Decide if query is in ascending order.", "# - A \"speed up\" for when events are stored in descending order (e.g.", "# in Cassandra) and it is faster to get them in that order.", "# - This isn't useful when 'until' or 'after' or 'limit' are set,", "# because the inclusiveness or exclusiveness of until and after", "# and the end of the stream that is truncated by limit both depend on", "# the direction of the query. Also paging backwards isn't useful, because", "# all the events are needed eventually, so it would probably slow things", "# down. Paging is intended to support replaying longer event streams, and", "# only makes sense to work in ascending order.", "if", "gt", "is", "None", "and", "gte", "is", "None", "and", "lt", "is", "None", "and", "lte", "is", "None", "and", "self", ".", "__page_size__", "is", "None", ":", "is_ascending", "=", "False", "else", ":", "is_ascending", "=", "not", "query_descending", "# Get entity's domain events from the event store.", "domain_events", "=", "self", ".", "event_store", ".", "get_domain_events", "(", "originator_id", "=", "entity_id", ",", "gt", "=", "gt", ",", "gte", "=", "gte", ",", "lt", "=", "lt", ",", "lte", "=", "lte", ",", "limit", "=", "limit", ",", "is_ascending", "=", "is_ascending", ",", "page_size", "=", "self", ".", "__page_size__", ")", "# The events will be replayed in ascending order.", "if", "not", "is_ascending", ":", "domain_events", "=", "list", "(", "reversed", "(", "list", "(", "domain_events", ")", ")", ")", "# Project the domain events onto the initial state.", "return", "self", ".", "project_events", "(", "initial_state", ",", "domain_events", ")" ]
Retrieve all users
def list ( self , * * params ) : _ , _ , users = self . http_client . get ( "/users" , params = params ) return users
4,969
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L2021-L2034
[ "def", "vn_release", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "call", "!=", "'function'", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_reserve function must be called with -f or --function.'", ")", "if", "kwargs", "is", "None", ":", "kwargs", "=", "{", "}", "vn_id", "=", "kwargs", ".", "get", "(", "'vn_id'", ",", "None", ")", "vn_name", "=", "kwargs", ".", "get", "(", "'vn_name'", ",", "None", ")", "path", "=", "kwargs", ".", "get", "(", "'path'", ",", "None", ")", "data", "=", "kwargs", ".", "get", "(", "'data'", ",", "None", ")", "if", "vn_id", ":", "if", "vn_name", ":", "log", ".", "warning", "(", "'Both the \\'vn_id\\' and \\'vn_name\\' arguments were provided. '", "'\\'vn_id\\' will take precedence.'", ")", "elif", "vn_name", ":", "vn_id", "=", "get_vn_id", "(", "kwargs", "=", "{", "'name'", ":", "vn_name", "}", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_release function requires a \\'vn_id\\' or a \\'vn_name\\' to '", "'be provided.'", ")", "if", "data", ":", "if", "path", ":", "log", ".", "warning", "(", "'Both the \\'data\\' and \\'path\\' arguments were provided. 
'", "'\\'data\\' will take precedence.'", ")", "elif", "path", ":", "with", "salt", ".", "utils", ".", "files", ".", "fopen", "(", "path", ",", "mode", "=", "'r'", ")", "as", "rfh", ":", "data", "=", "rfh", ".", "read", "(", ")", "else", ":", "raise", "SaltCloudSystemExit", "(", "'The vn_release function requires either \\'data\\' or a \\'path\\' to '", "'be provided.'", ")", "server", ",", "user", ",", "password", "=", "_get_xml_rpc", "(", ")", "auth", "=", "':'", ".", "join", "(", "[", "user", ",", "password", "]", ")", "response", "=", "server", ".", "one", ".", "vn", ".", "release", "(", "auth", ",", "int", "(", "vn_id", ")", ",", "data", ")", "ret", "=", "{", "'action'", ":", "'vn.release'", ",", "'released'", ":", "response", "[", "0", "]", ",", "'resource_id'", ":", "response", "[", "1", "]", ",", "'error_code'", ":", "response", "[", "2", "]", ",", "}", "return", "ret" ]
Retrieve a single user
def retrieve ( self , id ) : _ , _ , user = self . http_client . get ( "/users/{id}" . format ( id = id ) ) return user
4,970
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L2036-L2050
[ "def", "GetAttachmentIdFromMediaId", "(", "media_id", ")", ":", "altchars", "=", "'+-'", "if", "not", "six", ".", "PY2", ":", "altchars", "=", "altchars", ".", "encode", "(", "'utf-8'", ")", "# altchars for '+' and '/'. We keep '+' but replace '/' with '-'", "buffer", "=", "base64", ".", "b64decode", "(", "str", "(", "media_id", ")", ",", "altchars", ")", "resoure_id_length", "=", "20", "attachment_id", "=", "''", "if", "len", "(", "buffer", ")", ">", "resoure_id_length", ":", "# We are cutting off the storage index.", "attachment_id", "=", "base64", ".", "b64encode", "(", "buffer", "[", "0", ":", "resoure_id_length", "]", ",", "altchars", ")", "if", "not", "six", ".", "PY2", ":", "attachment_id", "=", "attachment_id", ".", "decode", "(", "'utf-8'", ")", "else", ":", "attachment_id", "=", "media_id", "return", "attachment_id" ]
Retrieve visit outcomes
def list ( self , * * params ) : _ , _ , visit_outcomes = self . http_client . get ( "/visit_outcomes" , params = params ) return visit_outcomes
4,971
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/services.py#L2122-L2135
[ "def", "wrap", "(", "vtkdataset", ")", ":", "wrappers", "=", "{", "'vtkUnstructuredGrid'", ":", "vtki", ".", "UnstructuredGrid", ",", "'vtkRectilinearGrid'", ":", "vtki", ".", "RectilinearGrid", ",", "'vtkStructuredGrid'", ":", "vtki", ".", "StructuredGrid", ",", "'vtkPolyData'", ":", "vtki", ".", "PolyData", ",", "'vtkImageData'", ":", "vtki", ".", "UniformGrid", ",", "'vtkStructuredPoints'", ":", "vtki", ".", "UniformGrid", ",", "'vtkMultiBlockDataSet'", ":", "vtki", ".", "MultiBlock", ",", "}", "key", "=", "vtkdataset", ".", "GetClassName", "(", ")", "try", ":", "wrapped", "=", "wrappers", "[", "key", "]", "(", "vtkdataset", ")", "except", ":", "logging", ".", "warning", "(", "'VTK data type ({}) is not currently supported by vtki.'", ".", "format", "(", "key", ")", ")", "return", "vtkdataset", "# if not supported just passes the VTK data object", "return", "wrapped" ]
Do the HTTP Request and return data
def request ( url , * args , * * kwargs ) : method = kwargs . get ( 'method' , 'GET' ) timeout = kwargs . pop ( 'timeout' , 10 ) # hass default timeout req = requests . request ( method , url , * args , timeout = timeout , * * kwargs ) data = req . json ( ) _LOGGER . debug ( json . dumps ( data ) ) return data
4,972
https://github.com/jalmeroth/pymusiccast/blob/616379ae22d6b518c61042d58be6d18a46242168/pymusiccast/helpers.py#L10-L17
[ "def", "update_cluster", "(", "cluster_ref", ",", "cluster_spec", ")", ":", "cluster_name", "=", "get_managed_object_name", "(", "cluster_ref", ")", "log", ".", "trace", "(", "'Updating cluster \\'%s\\''", ",", "cluster_name", ")", "try", ":", "task", "=", "cluster_ref", ".", "ReconfigureComputeResource_Task", "(", "cluster_spec", ",", "modify", "=", "True", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "wait_for_task", "(", "task", ",", "cluster_name", ",", "'ClusterUpdateTask'", ")" ]
Loop through messages and pass them on to right device
def message_worker ( device ) : _LOGGER . debug ( "Starting Worker Thread." ) msg_q = device . messages while True : if not msg_q . empty ( ) : message = msg_q . get ( ) data = { } try : data = json . loads ( message . decode ( "utf-8" ) ) except ValueError : _LOGGER . error ( "Received invalid message: %s" , message ) if 'device_id' in data : device_id = data . get ( 'device_id' ) if device_id == device . device_id : device . handle_event ( data ) else : _LOGGER . warning ( "Received message for unknown device." ) msg_q . task_done ( ) time . sleep ( 0.2 )
4,973
https://github.com/jalmeroth/pymusiccast/blob/616379ae22d6b518c61042d58be6d18a46242168/pymusiccast/helpers.py#L20-L44
[ "def", "get", "(", "self", ",", "path_info", ")", ":", "assert", "path_info", "[", "\"scheme\"", "]", "==", "\"local\"", "path", "=", "path_info", "[", "\"path\"", "]", "if", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ":", "return", "None", "actual_mtime", ",", "actual_size", "=", "get_mtime_and_size", "(", "path", ")", "actual_inode", "=", "get_inode", "(", "path", ")", "existing_record", "=", "self", ".", "get_state_record_for_inode", "(", "actual_inode", ")", "if", "not", "existing_record", ":", "return", "None", "mtime", ",", "size", ",", "checksum", ",", "_", "=", "existing_record", "if", "self", ".", "_file_metadata_changed", "(", "actual_mtime", ",", "mtime", ",", "actual_size", ",", "size", ")", ":", "return", "None", "self", ".", "_update_state_record_timestamp_for_inode", "(", "actual_inode", ")", "return", "checksum" ]
Socket Loop that fills message queue
def socket_worker ( sock , msg_q ) : _LOGGER . debug ( "Starting Socket Thread." ) while True : try : data , addr = sock . recvfrom ( 1024 ) # buffer size is 1024 bytes except OSError as err : _LOGGER . error ( err ) else : _LOGGER . debug ( "received message: %s from %s" , data , addr ) msg_q . put ( data ) time . sleep ( 0.2 )
4,974
https://github.com/jalmeroth/pymusiccast/blob/616379ae22d6b518c61042d58be6d18a46242168/pymusiccast/helpers.py#L47-L58
[ "def", "_get_license_description", "(", "license_code", ")", ":", "req", "=", "requests", ".", "get", "(", "\"{base_url}/licenses/{license_code}\"", ".", "format", "(", "base_url", "=", "BASE_URL", ",", "license_code", "=", "license_code", ")", ",", "headers", "=", "_HEADERS", ")", "if", "req", ".", "status_code", "==", "requests", ".", "codes", ".", "ok", ":", "s", "=", "req", ".", "json", "(", ")", "[", "\"body\"", "]", "search_curly", "=", "re", ".", "search", "(", "r'\\{(.*)\\}'", ",", "s", ")", "search_square", "=", "re", ".", "search", "(", "r'\\[(.*)\\]'", ",", "s", ")", "license", "=", "\"\"", "replace_string", "=", "'{year} {name}'", ".", "format", "(", "year", "=", "date", ".", "today", "(", ")", ".", "year", ",", "name", "=", "_get_config_name", "(", ")", ")", "if", "search_curly", ":", "license", "=", "re", ".", "sub", "(", "r'\\{(.+)\\}'", ",", "replace_string", ",", "s", ")", "elif", "search_square", ":", "license", "=", "re", ".", "sub", "(", "r'\\[(.+)\\]'", ",", "replace_string", ",", "s", ")", "else", ":", "license", "=", "s", "return", "license", "else", ":", "print", "(", "Fore", ".", "RED", "+", "'No such license. Please check again.'", ")", ",", "print", "(", "Style", ".", "RESET_ALL", ")", ",", "sys", ".", "exit", "(", ")" ]
Toplogically sorts a list match graph .
def toposort ( graph , pick_first = 'head' ) : in_deg = { } for node , next_nodes in six . iteritems ( graph ) : for next_node in [ next_nodes . head_node , next_nodes . update_node ] : if next_node is None : continue in_deg [ next_node ] = in_deg . get ( next_node , 0 ) + 1 stk = [ FIRST ] ordered = [ ] visited = set ( ) while stk : node = stk . pop ( ) visited . add ( node ) if node != FIRST : ordered . append ( node ) traversal = _get_traversal ( graph . get ( node , BeforeNodes ( ) ) , pick_first ) for next_node in traversal : if next_node is None : continue if next_node in visited : raise ValueError ( 'Graph has a cycle' ) in_deg [ next_node ] -= 1 if in_deg [ next_node ] == 0 : stk . append ( next_node ) # Nodes may not be walked because they don't reach in degree 0. if len ( ordered ) != len ( graph ) - 1 : raise ValueError ( 'Graph has a cycle' ) return ordered
4,975
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/graph_builder.py#L231-L266
[ "def", "delete", "(", "self", ",", "addon_id", ",", "data", "=", "{", "}", ",", "*", "*", "kwargs", ")", ":", "return", "super", "(", "Addon", ",", "self", ")", ".", "delete", "(", "addon_id", ",", "data", ",", "*", "*", "kwargs", ")" ]
Fallback for cases in which the graph has cycles .
def sort_cyclic_graph_best_effort ( graph , pick_first = 'head' ) : ordered = [ ] visited = set ( ) # Go first on the pick_first chain then go back again on the others # that were not visited. Given the way the graph is built both chains # will always contain all the elements. if pick_first == 'head' : fst_attr , snd_attr = ( 'head_node' , 'update_node' ) else : fst_attr , snd_attr = ( 'update_node' , 'head_node' ) current = FIRST while current is not None : visited . add ( current ) current = getattr ( graph [ current ] , fst_attr ) if current not in visited and current is not None : ordered . append ( current ) current = FIRST while current is not None : visited . add ( current ) current = getattr ( graph [ current ] , snd_attr ) if current not in visited and current is not None : ordered . append ( current ) return ordered
4,976
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/graph_builder.py#L269-L293
[ "def", "generate_http_manifest", "(", "self", ")", ":", "base_path", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "translate_path", "(", "self", ".", "path", ")", ")", "self", ".", "dataset", "=", "dtoolcore", ".", "DataSet", ".", "from_uri", "(", "base_path", ")", "admin_metadata_fpath", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "\".dtool\"", ",", "\"dtool\"", ")", "with", "open", "(", "admin_metadata_fpath", ")", "as", "fh", ":", "admin_metadata", "=", "json", ".", "load", "(", "fh", ")", "http_manifest", "=", "{", "\"admin_metadata\"", ":", "admin_metadata", ",", "\"manifest_url\"", ":", "self", ".", "generate_url", "(", "\".dtool/manifest.json\"", ")", ",", "\"readme_url\"", ":", "self", ".", "generate_url", "(", "\"README.yml\"", ")", ",", "\"overlays\"", ":", "self", ".", "generate_overlay_urls", "(", ")", ",", "\"item_urls\"", ":", "self", ".", "generate_item_urls", "(", ")", "}", "return", "bytes", "(", "json", ".", "dumps", "(", "http_manifest", ")", ",", "\"utf-8\"", ")" ]
Retrieve an url .
def get ( url ) : writeln ( "Getting data from url" , url ) response = requests . get ( url ) if response . status_code == 200 : writeln ( response . text ) else : writeln ( str ( response . status_code ) , response . reason )
4,977
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/examples/httprequest.py#L11-L18
[ "def", "pivot_wavelength", "(", "self", ")", ":", "wl", "=", "self", ".", "registry", ".", "_pivot_wavelengths", ".", "get", "(", "(", "self", ".", "telescope", ",", "self", ".", "band", ")", ")", "if", "wl", "is", "not", "None", ":", "return", "wl", "wl", "=", "self", ".", "calc_pivot_wavelength", "(", ")", "self", ".", "registry", ".", "register_pivot_wavelength", "(", "self", ".", "telescope", ",", "self", ".", "band", ",", "wl", ")", "return", "wl" ]
Post data to an url .
def post ( url , var ) : data = { b [ 0 ] : b [ 1 ] for b in [ a . split ( "=" ) for a in var ] } writeln ( "Sending data to url" , url ) response = requests . post ( url , data = data ) if response . status_code == 200 : writeln ( response . text ) else : writeln ( str ( response . status_code ) , response . reason )
4,978
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/examples/httprequest.py#L23-L31
[ "def", "deserialize_non_framed_values", "(", "stream", ",", "header", ",", "verifier", "=", "None", ")", ":", "_LOGGER", ".", "debug", "(", "\"Starting non-framed body iv/tag deserialization\"", ")", "(", "data_iv", ",", "data_length", ")", "=", "unpack_values", "(", "\">{}sQ\"", ".", "format", "(", "header", ".", "algorithm", ".", "iv_len", ")", ",", "stream", ",", "verifier", ")", "return", "data_iv", ",", "data_length" ]
cast str or bytes to bytes
def cast_bytes ( s , encoding = 'utf8' , errors = 'strict' ) : if isinstance ( s , bytes ) : return s elif isinstance ( s , str ) : return s . encode ( encoding , errors ) else : raise TypeError ( "Expected unicode or bytes, got %r" % s )
4,979
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/utils.py#L46-L53
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
cast bytes or str to str
def cast_str ( s , encoding = 'utf8' , errors = 'strict' ) : if isinstance ( s , bytes ) : return s . decode ( encoding , errors ) elif isinstance ( s , str ) : return s else : raise TypeError ( "Expected unicode or bytes, got %r" % s )
4,980
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/utils.py#L57-L64
[ "def", "load", "(", "filename", ")", ":", "file", "=", "open", "(", "filename", ",", "'rb'", ")", "container", "=", "std_pickle", ".", "load", "(", "file", ")", "file", ".", "close", "(", ")", "db", "=", "Database", "(", "file", ".", "name", ")", "chains", "=", "0", "funs", "=", "set", "(", ")", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "container", ")", ":", "if", "k", "==", "'_state_'", ":", "db", ".", "_state_", "=", "v", "else", ":", "db", ".", "_traces", "[", "k", "]", "=", "Trace", "(", "name", "=", "k", ",", "value", "=", "v", ",", "db", "=", "db", ")", "setattr", "(", "db", ",", "k", ",", "db", ".", "_traces", "[", "k", "]", ")", "chains", "=", "max", "(", "chains", ",", "len", "(", "v", ")", ")", "funs", ".", "add", "(", "k", ")", "db", ".", "chains", "=", "chains", "db", ".", "trace_names", "=", "chains", "*", "[", "list", "(", "funs", ")", "]", "return", "db" ]
cast timestamp to datetime or date str
def cast_datetime ( ts , fmt = None ) : dt = datetime . datetime . fromtimestamp ( ts ) if fmt : return dt . strftime ( fmt ) return dt
4,981
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/utils.py#L68-L73
[ "def", "plot_bcr", "(", "fignum", ",", "Bcr1", ",", "Bcr2", ")", ":", "plt", ".", "figure", "(", "num", "=", "fignum", ")", "plt", ".", "plot", "(", "Bcr1", ",", "Bcr2", ",", "'ro'", ")", "plt", ".", "xlabel", "(", "'Bcr1'", ")", "plt", ".", "ylabel", "(", "'Bcr2'", ")", "plt", ".", "title", "(", "'Compare coercivity of remanence'", ")" ]
>>> from Redy . Magic . Classic import singleton >>>
def singleton_init_by ( init_fn = None ) : if not init_fn : def wrap_init ( origin_init ) : return origin_init else : def wrap_init ( origin_init ) : def __init__ ( self ) : origin_init ( self ) init_fn ( self ) return __init__ def inner ( cls_def : type ) : if not hasattr ( cls_def , '__instancecheck__' ) or isinstance ( cls_def . __instancecheck__ , ( types . BuiltinMethodType , _slot_wrapper ) ) : def __instancecheck__ ( self , instance ) : return instance is self cls_def . __instancecheck__ = __instancecheck__ _origin_init = cls_def . __init__ cls_def . __init__ = wrap_init ( _origin_init ) return cls_def ( ) return inner
4,982
https://github.com/thautwarm/Redy/blob/8beee5c5f752edfd2754bb1e6b5f4acb016a7770/Redy/Magic/Classic.py#L18-L51
[ "def", "_get_segments", "(", "self", ",", "start", ",", "request_size", ")", ":", "if", "not", "request_size", ":", "return", "[", "]", "end", "=", "start", "+", "request_size", "futures", "=", "[", "]", "while", "request_size", ">", "self", ".", "_max_request_size", ":", "futures", ".", "append", "(", "self", ".", "_get_segment", "(", "start", ",", "self", ".", "_max_request_size", ")", ")", "request_size", "-=", "self", ".", "_max_request_size", "start", "+=", "self", ".", "_max_request_size", "if", "start", "<", "end", ":", "futures", ".", "append", "(", "self", ".", "_get_segment", "(", "start", ",", "end", "-", "start", ")", ")", "return", "[", "fut", ".", "get_result", "(", ")", "for", "fut", "in", "futures", "]" ]
>>> from Redy . Magic . Classic import const_return >>>
def const_return ( func ) : result = _undef def ret_call ( * args , * * kwargs ) : nonlocal result if result is _undef : result = func ( * args , * * kwargs ) return result return ret_call
4,983
https://github.com/thautwarm/Redy/blob/8beee5c5f752edfd2754bb1e6b5f4acb016a7770/Redy/Magic/Classic.py#L57-L74
[ "def", "aggregate", "(", "self", ",", "val1", ",", "val2", ")", ":", "assert", "val1", "is", "not", "None", "assert", "val2", "is", "not", "None", "return", "self", ".", "_aggregator", "(", "val1", ",", "val2", ")" ]
>>> from Redy . Magic . Classic import execute >>> x = 1 >>>
def execute ( func : types . FunctionType ) : spec = getfullargspec ( func ) default = spec . defaults arg_cursor = 0 def get_item ( name ) : nonlocal arg_cursor ctx = func . __globals__ value = ctx . get ( name , _undef ) if value is _undef : try : value = default [ arg_cursor ] arg_cursor += 1 except ( TypeError , IndexError ) : raise ValueError ( f"Current context has no variable `{name}`" ) return value return func ( * ( get_item ( arg_name ) for arg_name in spec . args ) )
4,984
https://github.com/thautwarm/Redy/blob/8beee5c5f752edfd2754bb1e6b5f4acb016a7770/Redy/Magic/Classic.py#L77-L102
[ "def", "new_keypair", "(", "key", ",", "value", ",", "ambig", ",", "unambig", ")", ":", "if", "key", "in", "ambig", ":", "return", "if", "key", "in", "unambig", "and", "value", "!=", "unambig", "[", "key", "]", ":", "ambig", ".", "add", "(", "key", ")", "del", "unambig", "[", "key", "]", "return", "unambig", "[", "key", "]", "=", "value", "return" ]
>>> from Redy . Magic . Classic import cast >>>
def cast ( cast_fn ) : def inner ( func ) : def call ( * args , * * kwargs ) : return cast_fn ( func ( * args , * * kwargs ) ) functools . update_wrapper ( call , func ) return call return inner
4,985
https://github.com/thautwarm/Redy/blob/8beee5c5f752edfd2754bb1e6b5f4acb016a7770/Redy/Magic/Classic.py#L105-L123
[ "def", "display", "(", "self", ")", ":", "total", "=", "0", "count", "=", "0", "for", "i", ",", "result", "in", "enumerate", "(", "self", ".", "_results", ")", ":", "if", "total", "==", "0", ":", "self", ".", "pre_write", "(", ")", "self", ".", "write", "(", "result", ")", "count", "+=", "1", "total", "+=", "1", "if", "(", "count", ">=", "self", ".", "pagesize", "and", "self", ".", "pagesize", ">", "0", "and", "i", "<", "len", "(", "self", ".", "_results", ")", "-", "1", ")", ":", "self", ".", "wait", "(", ")", "count", "=", "0", "if", "total", "==", "0", ":", "self", ".", "_ostream", ".", "write", "(", "\"No results\\n\"", ")", "else", ":", "self", ".", "post_write", "(", ")" ]
add a new action with specific priority
def insert ( self , action : Action , where : 'Union[int, Delegate.Where]' ) : if isinstance ( where , int ) : self . actions . insert ( where , action ) return here = where ( self . actions ) self . actions . insert ( here , action )
4,986
https://github.com/thautwarm/Redy/blob/8beee5c5f752edfd2754bb1e6b5f4acb016a7770/Redy/Async/Delegate.py#L55-L68
[ "def", "close_cache", "(", "self", ")", ":", "# Close all WS_Shinken cache files", "for", "server", "in", "self", ".", "servers", ":", "if", "self", ".", "servers", "[", "server", "]", "[", "'cache'", "]", "==", "True", ":", "self", ".", "servers", "[", "server", "]", "[", "'file'", "]", ".", "close", "(", ")" ]
Translates a dictdiffer conflict into a json_merger one .
def patch_to_conflict_set ( patch ) : patch_type , patched_key , value = patch if isinstance ( patched_key , list ) : key_path = tuple ( patched_key ) else : key_path = tuple ( k for k in patched_key . split ( '.' ) if k ) conflicts = set ( ) if patch_type == REMOVE : conflict_type = ConflictType . REMOVE_FIELD for key , obj in value : conflicts . add ( Conflict ( conflict_type , key_path + ( key , ) , None ) ) elif patch_type == CHANGE : conflict_type = ConflictType . SET_FIELD first_val , second_val = value conflicts . add ( Conflict ( conflict_type , key_path , second_val ) ) elif patch_type == ADD : conflict_type = ConflictType . SET_FIELD for key , obj in value : conflicts . add ( Conflict ( conflict_type , key_path + ( key , ) , obj ) ) return conflicts
4,987
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/dict_merger.py#L54-L76
[ "def", "isApplicationInstalled", "(", "self", ",", "pchAppKey", ")", ":", "fn", "=", "self", ".", "function_table", ".", "isApplicationInstalled", "result", "=", "fn", "(", "pchAppKey", ")", "return", "result" ]
Perform merge of head and update starting from root .
def merge ( self ) : if isinstance ( self . head , dict ) and isinstance ( self . update , dict ) : if not isinstance ( self . root , dict ) : self . root = { } self . _merge_dicts ( ) else : self . _merge_base_values ( ) if self . conflict_set : raise MergeError ( 'Dictdiffer Errors' , self . conflicts )
4,988
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/dict_merger.py#L236-L246
[ "def", "get_session_token", "(", "self", ")", ":", "# self.logging.info('Getting session token')", "# Rather than testing any previous session tokens to see if they are still valid, simply delete old tokens in", "# preparation of the creation of new ones", "try", ":", "os", ".", "remove", "(", "os", ".", "path", ".", "join", "(", "self", ".", "file_path", ",", "'session_token'", ")", ")", "except", "FileNotFoundError", ":", "pass", "# Create a new session", "session_request", "=", "OAuth1Session", "(", "self", ".", "consumer_key", ",", "self", ".", "consumer_secret", ",", "access_token", "=", "self", ".", "access_token", ",", "access_token_secret", "=", "self", ".", "access_secret", ")", "# Perform a GET request with the appropriate keys and tokens", "r", "=", "session_request", ".", "get", "(", "self", ".", "session_token_url", ")", "# If the status code is '200' (OK), proceed", "if", "r", ".", "status_code", "==", "200", ":", "# Save the JSON-decoded token secret and token", "self", ".", "session_token", "=", "r", ".", "json", "(", ")", "[", "'oauth_token'", "]", "self", ".", "session_secret", "=", "r", ".", "json", "(", ")", "[", "'oauth_token_secret'", "]", "# Write the token and secret to file", "self", ".", "write_token", "(", "'session_token'", ",", "self", ".", "session_token", ",", "self", ".", "session_secret", ")", "# Any other status than 200 is considered a failure", "else", ":", "print", "(", "'Failed:'", ")", "print", "(", "r", ".", "json", "(", ")", "[", "'message'", "]", ")" ]
Computes distance between 2D points using chebyshev metric
def chebyshev ( point1 , point2 ) : return max ( abs ( point1 [ 0 ] - point2 [ 0 ] ) , abs ( point1 [ 1 ] - point2 [ 1 ] ) )
4,989
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L26-L37
[ "def", "add_colortable", "(", "self", ",", "fobj", ",", "name", ")", ":", "self", "[", "name", "]", "=", "read_colortable", "(", "fobj", ")", "self", "[", "name", "+", "'_r'", "]", "=", "self", "[", "name", "]", "[", ":", ":", "-", "1", "]" ]
Scan pixels in a circle pattern around a center point
def circlescan ( x0 , y0 , r1 , r2 ) : # Validate inputs if r1 < 0 : raise ValueError ( "Initial radius must be non-negative" ) if r2 < 0 : raise ValueError ( "Final radius must be non-negative" ) # List of pixels visited in previous diameter previous = [ ] # Scan distances outward (1) or inward (-1) rstep = 1 if r2 >= r1 else - 1 for distance in range ( r1 , r2 + rstep , rstep ) : if distance == 0 : yield x0 , y0 else : # Computes points for first octant and the rotate by multiples of # 45 degrees to compute the other octants a = 0.707107 rotations = { 0 : [ [ 1 , 0 ] , [ 0 , 1 ] ] , 1 : [ [ a , a ] , [ - a , a ] ] , 2 : [ [ 0 , 1 ] , [ - 1 , 0 ] ] , 3 : [ [ - a , a ] , [ - a , - a ] ] , 4 : [ [ - 1 , 0 ] , [ 0 , - 1 ] ] , 5 : [ [ - a , - a ] , [ a , - a ] ] , 6 : [ [ 0 , - 1 ] , [ 1 , 0 ] ] , 7 : [ [ a , - a ] , [ a , a ] ] } nangles = len ( rotations ) # List of pixels visited in current diameter current = [ ] for angle in range ( nangles ) : x = 0 y = distance d = 1 - distance while x < y : xr = rotations [ angle ] [ 0 ] [ 0 ] * x + rotations [ angle ] [ 0 ] [ 1 ] * y yr = rotations [ angle ] [ 1 ] [ 0 ] * x + rotations [ angle ] [ 1 ] [ 1 ] * y xr = x0 + xr yr = y0 + yr # First check if point was in previous diameter # since our scan pattern can lead to duplicates in # neighboring diameters point = ( int ( round ( xr ) ) , int ( round ( yr ) ) ) if point not in previous : yield xr , yr current . append ( point ) # Move pixel according to circle constraint if ( d < 0 ) : d += 3 + 2 * x else : d += 5 - 2 * ( y - x ) y -= 1 x += 1 previous = current
4,990
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L392-L466
[ "def", "ranker", "(", "self", ",", "X", ",", "meta", ")", ":", "# total score is just a sum of each row", "total_score", "=", "X", ".", "sum", "(", "axis", "=", "1", ")", ".", "transpose", "(", ")", "total_score", "=", "np", ".", "squeeze", "(", "np", ".", "asarray", "(", "total_score", ")", ")", "# matrix to array", "ranks", "=", "total_score", ".", "argsort", "(", ")", "ranks", "=", "ranks", "[", ":", ":", "-", "1", "]", "# sort the list of dicts according to ranks", "sorted_meta", "=", "[", "meta", "[", "r", "]", "for", "r", "in", "ranks", "]", "sorted_X", "=", "X", "[", "ranks", "]", "return", "(", "sorted_X", ",", "sorted_meta", ")" ]
Scan pixels in a grid pattern along the x - coordinate then y - coordinate
def gridscan ( xi , yi , xf , yf , stepx = 1 , stepy = 1 ) : if stepx <= 0 : raise ValueError ( "X-step must be positive" ) if stepy <= 0 : raise ValueError ( "Y-step must be positive" ) # Determine direction to move dx = stepx if xf >= xi else - stepx dy = stepy if yf >= yi else - stepy for y in range ( yi , yf + dy , dy ) : for x in range ( xi , xf + dx , dx ) : yield x , y
4,991
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L468-L496
[ "def", "get_description", "(", "self", ")", ":", "vo", "=", "ffi", ".", "cast", "(", "'VipsObject *'", ",", "self", ".", "pointer", ")", "return", "_to_string", "(", "vips_lib", ".", "vips_object_get_description", "(", "vo", ")", ")" ]
Scan pixels in a ring pattern around a center point clockwise
def ringscan ( x0 , y0 , r1 , r2 , metric = chebyshev ) : # Validate inputs if r1 < 0 : raise ValueError ( "Initial radius must be non-negative" ) if r2 < 0 : raise ValueError ( "Final radius must be non-negative" ) if not hasattr ( metric , "__call__" ) : raise TypeError ( "Metric not callable" ) # Define clockwise step directions direction = 0 steps = { 0 : [ 1 , 0 ] , 1 : [ 1 , - 1 ] , 2 : [ 0 , - 1 ] , 3 : [ - 1 , - 1 ] , 4 : [ - 1 , 0 ] , 5 : [ - 1 , 1 ] , 6 : [ 0 , 1 ] , 7 : [ 1 , 1 ] } nsteps = len ( steps ) center = [ x0 , y0 ] # Scan distances outward (1) or inward (-1) rstep = 1 if r2 >= r1 else - 1 for distance in range ( r1 , r2 + rstep , rstep ) : initial = [ x0 , y0 + distance ] current = initial # Number of tries to find a valid neighrbor ntrys = 0 while True : # Short-circuit special case if distance == 0 : yield current [ 0 ] , current [ 1 ] break # Try and take a step and check if still within distance nextpoint = [ current [ i ] + steps [ direction ] [ i ] for i in range ( 2 ) ] if metric ( center , nextpoint ) != distance : # Check if we tried all step directions and failed ntrys += 1 if ntrys == nsteps : break # Try the next direction direction = ( direction + 1 ) % nsteps continue ntrys = 0 yield current [ 0 ] , current [ 1 ] # Check if we have come all the way around current = nextpoint if current == initial : break # Check if we tried all step directions and failed if ntrys == nsteps : break
4,992
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L528-L604
[ "def", "load_vocab", "(", "self", ",", "vocab_name", ",", "*", "*", "kwargs", ")", ":", "log", ".", "setLevel", "(", "kwargs", ".", "get", "(", "\"log_level\"", ",", "self", ".", "log_level", ")", ")", "vocab", "=", "self", ".", "get_vocab", "(", "vocab_name", ",", "*", "*", "kwargs", ")", "if", "vocab", "[", "'filename'", "]", "in", "self", ".", "loaded", ":", "if", "self", ".", "loaded_times", ".", "get", "(", "vocab", "[", "'filename'", "]", ",", "datetime", ".", "datetime", "(", "2001", ",", "1", ",", "1", ")", ")", ".", "timestamp", "(", ")", "<", "vocab", "[", "'modified'", "]", ":", "self", ".", "drop_file", "(", "vocab", "[", "'filename'", "]", ",", "*", "*", "kwargs", ")", "else", ":", "return", "conn", "=", "kwargs", ".", "get", "(", "\"conn\"", ",", "self", ".", "conn", ")", "conn", ".", "load_data", "(", "graph", "=", "getattr", "(", "__NSM__", ".", "kdr", ",", "vocab", "[", "'filename'", "]", ")", ".", "clean_uri", ",", "data", "=", "vocab", "[", "'data'", "]", ",", "datatype", "=", "vocab", "[", "'filename'", "]", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", ",", "log_level", "=", "logging", ".", "WARNING", ")", "self", ".", "__update_time__", "(", "vocab", "[", "'filename'", "]", ",", "*", "*", "kwargs", ")", "log", ".", "warning", "(", "\"\\n\\tvocab: '%s' loaded \\n\\tconn: '%s'\"", ",", "vocab", "[", "'filename'", "]", ",", "conn", ")", "self", ".", "loaded", ".", "append", "(", "vocab", "[", "'filename'", "]", ")" ]
Scan pixels in a snake pattern along the x - coordinate then y - coordinate
def snakescan ( xi , yi , xf , yf ) : # Determine direction to move dx = 1 if xf >= xi else - 1 dy = 1 if yf >= yi else - 1 # Scan pixels first along x-coordinate then y-coordinate and flip # x-direction when the end of the line is reached x , xa , xb = xi , xi , xf for y in range ( yi , yf + dy , dy ) : for x in range ( xa , xb + dx , dx ) : yield x , y # Swap x-direction if x == xa or x == xb : dx *= - 1 xa , xb = xb , xa
4,993
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L606-L635
[ "def", "is_rate_matrix", "(", "K", ",", "tol", "=", "1e-12", ")", ":", "K", "=", "_types", ".", "ensure_ndarray_or_sparse", "(", "K", ",", "ndim", "=", "2", ",", "uniform", "=", "True", ",", "kind", "=", "'numeric'", ")", "if", "_issparse", "(", "K", ")", ":", "return", "sparse", ".", "assessment", ".", "is_rate_matrix", "(", "K", ",", "tol", ")", "else", ":", "return", "dense", ".", "assessment", ".", "is_rate_matrix", "(", "K", ",", "tol", ")" ]
Scan pixels in a random walk pattern with given step probabilities . The random walk will continue indefinitely unless a skip transformation is used with the stop parameter set or a clip transformation is used with the abort parameter set to True . The probabilities are normalized to sum to 1 .
def walkscan ( x0 , y0 , xn = 0.25 , xp = 0.25 , yn = 0.25 , yp = 0.25 ) : # Validate inputs if xn < 0 : raise ValueError ( "Negative x probabilty must be non-negative" ) if xp < 0 : raise ValueError ( "Positive x probabilty must be non-negative" ) if yn < 0 : raise ValueError ( "Negative y probabilty must be non-negative" ) if yp < 0 : raise ValueError ( "Positive y probabilty must be non-negative" ) # Compute normalized probability total = xp + xn + yp + yn xn /= total xp /= total yn /= total yp /= total # Compute cumulative probability cxn = xn cxp = cxn + xp cyn = cxp + yn # Initialize position x , y = x0 , y0 while True : yield x , y # Take random step probability = random . random ( ) if probability <= cxn : x -= 1 elif probability <= cxp : x += 1 elif probability <= cyn : y -= 1 else : y += 1
4,994
https://github.com/dpmcmlxxvi/pixelscan/blob/d641207b13a8fc5bf7ac9964b982971652bb0a7e/pixelscan/pixelscan.py#L637-L691
[ "def", "with_headers", "(", "self", ",", "headers", ")", ":", "return", "self", ".", "replace", "(", "headers", "=", "_merge_maps", "(", "self", ".", "headers", ",", "headers", ")", ")" ]
Validates whether a configuration is valid .
def validate ( self ) : if self . access_token is None : raise ConfigurationError ( 'No access token provided. ' 'Set your access token during client initialization using: ' '"basecrm.Client(access_token= <YOUR_PERSONAL_ACCESS_TOKEN>)"' ) if re . search ( r'\s' , self . access_token ) : raise ConfigurationError ( 'Provided access token is invalid ' 'as it contains disallowed characters. ' 'Please double-check you access token.' ) if len ( self . access_token ) != 64 : raise ConfigurationError ( 'Provided access token is invalid ' 'as it has invalid length. ' 'Please double-check your access token.' ) if not self . base_url or not re . match ( self . URL_REGEXP , self . base_url ) : raise ConfigurationError ( 'Provided base url is invalid ' 'as it not a valid URI. ' 'Please make sure it incldues the schema part, ' 'both http and https are accepted, ' 'and the hierarchical part' ) return True
4,995
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/configuration.py#L35-L66
[ "def", "login", "(", "self", ")", ":", "access_token", "=", "self", ".", "_get_access_token", "(", ")", "try", ":", "super", "(", "IAMSession", ",", "self", ")", ".", "request", "(", "'POST'", ",", "self", ".", "_session_url", ",", "headers", "=", "{", "'Content-Type'", ":", "'application/json'", "}", ",", "data", "=", "json", ".", "dumps", "(", "{", "'access_token'", ":", "access_token", "}", ")", ")", ".", "raise_for_status", "(", ")", "except", "RequestException", ":", "raise", "CloudantException", "(", "'Failed to exchange IAM token with Cloudant'", ")" ]
Start synchronization flow
def start ( self , device_uuid ) : status_code , _ , session = self . http_client . post ( '/sync/start' , body = None , headers = self . build_headers ( device_uuid ) ) return None if status_code == 204 else session
4,996
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/sync.py#L18-L35
[ "def", "ReadAtOffset", "(", "self", ",", "offset", ",", "size", "=", "None", ")", ":", "if", "size", "is", "not", "None", "and", "size", "<", "0", ":", "raise", "ValueError", "(", "'Invalid size value {0!s}'", ".", "format", "(", "size", ")", ")", "if", "offset", "<", "0", ":", "raise", "ValueError", "(", "'Invalid offset value {0!s}'", ".", "format", "(", "offset", ")", ")", "if", "size", "==", "0", "or", "offset", ">=", "self", ".", "uncompressed_data_size", ":", "return", "b''", "if", "self", ".", "_cache_start_offset", "is", "None", ":", "self", ".", "_LoadDataIntoCache", "(", "self", ".", "_file_object", ",", "offset", ")", "if", "offset", ">", "self", ".", "_cache_end_offset", "or", "offset", "<", "self", ".", "_cache_start_offset", ":", "self", ".", "FlushCache", "(", ")", "self", ".", "_LoadDataIntoCache", "(", "self", ".", "_file_object", ",", "offset", ")", "cache_offset", "=", "offset", "-", "self", ".", "_cache_start_offset", "if", "not", "size", ":", "return", "self", ".", "_cache", "[", "cache_offset", ":", "]", "data_end_offset", "=", "cache_offset", "+", "size", "if", "data_end_offset", ">", "self", ".", "_cache_end_offset", ":", "return", "self", ".", "_cache", "[", "cache_offset", ":", "]", "return", "self", ".", "_cache", "[", "cache_offset", ":", "data_end_offset", "]" ]
Get data from queue
def fetch ( self , device_uuid , session_id ) : status_code , _ , root = self . http_client . get ( "/sync/{session_id}/queues/main" . format ( session_id = session_id ) , params = None , headers = self . build_headers ( device_uuid ) , raw = True ) return [ ] if status_code == 204 else root [ 'items' ]
4,997
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/sync.py#L37-L59
[ "def", "fingerprint", "(", "self", ",", "option_type", ",", "option_val", ")", ":", "if", "option_val", "is", "None", ":", "return", "None", "# Wrapping all other values in a list here allows us to easily handle single-valued and", "# list-valued options uniformly. For non-list-valued options, this will be a singleton list", "# (with the exception of dict, which is not modified). This dict exception works because we do", "# not currently have any \"list of dict\" type, so there is no ambiguity.", "if", "not", "isinstance", "(", "option_val", ",", "(", "list", ",", "tuple", ",", "dict", ")", ")", ":", "option_val", "=", "[", "option_val", "]", "if", "option_type", "==", "target_option", ":", "return", "self", ".", "_fingerprint_target_specs", "(", "option_val", ")", "elif", "option_type", "==", "dir_option", ":", "return", "self", ".", "_fingerprint_dirs", "(", "option_val", ")", "elif", "option_type", "==", "file_option", ":", "return", "self", ".", "_fingerprint_files", "(", "option_val", ")", "elif", "option_type", "==", "dict_with_files_option", ":", "return", "self", ".", "_fingerprint_dict_with_files", "(", "option_val", ")", "else", ":", "return", "self", ".", "_fingerprint_primitives", "(", "option_val", ")" ]
Acknowledge received data
def ack ( self , device_uuid , ack_keys ) : attributes = { 'ack_keys' : ack_keys } status_code , _ , _ = self . http_client . post ( '/sync/ack' , body = attributes , headers = self . build_headers ( device_uuid ) ) return status_code == 202
4,998
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/sync.py#L61-L80
[ "def", "save_vocabulary", "(", "self", ",", "vocab_path", ")", ":", "index", "=", "0", "if", "os", ".", "path", ".", "isdir", "(", "vocab_path", ")", ":", "vocab_file", "=", "os", ".", "path", ".", "join", "(", "vocab_path", ",", "VOCAB_NAME", ")", "with", "open", "(", "vocab_file", ",", "\"w\"", ",", "encoding", "=", "\"utf-8\"", ")", "as", "writer", ":", "for", "token", ",", "token_index", "in", "sorted", "(", "self", ".", "vocab", ".", "items", "(", ")", ",", "key", "=", "lambda", "kv", ":", "kv", "[", "1", "]", ")", ":", "if", "index", "!=", "token_index", ":", "logger", ".", "warning", "(", "\"Saving vocabulary to {}: vocabulary indices are not consecutive.\"", "\" Please check that the vocabulary is not corrupted!\"", ".", "format", "(", "vocab_file", ")", ")", "index", "=", "token_index", "writer", ".", "write", "(", "token", "+", "u'\\n'", ")", "index", "+=", "1", "return", "vocab_file" ]
Perform a full synchronization flow .
def fetch ( self , callback ) : # Set up a new synchronization session for a given device's UUID session = self . client . sync . start ( self . device_uuid ) # Check if there is anything to synchronize if session is None or 'id' not in session : return # Drain the main queue until there is no more data (empty array) while True : # Fetch the main queue queue_items = self . client . sync . fetch ( self . device_uuid , session [ 'id' ] ) # nothing more to synchronize ? if not queue_items : break # let client know about both data and meta ack_keys = [ ] for item in queue_items : if callback ( item [ 'meta' ] , item [ 'data' ] ) : ack_keys . append ( item [ 'meta' ] [ 'sync' ] [ 'ack_key' ] ) # As we fetch new data, we need to send acknowledgement keys # if any .. if ack_keys : self . client . sync . ack ( self . device_uuid , ack_keys )
4,999
https://github.com/basecrm/basecrm-python/blob/7c1cf97dbaba8aeb9ff89f8a54f945a8702349f6/basecrm/sync.py#L117-L159
[ "def", "wrap", "(", "vtkdataset", ")", ":", "wrappers", "=", "{", "'vtkUnstructuredGrid'", ":", "vtki", ".", "UnstructuredGrid", ",", "'vtkRectilinearGrid'", ":", "vtki", ".", "RectilinearGrid", ",", "'vtkStructuredGrid'", ":", "vtki", ".", "StructuredGrid", ",", "'vtkPolyData'", ":", "vtki", ".", "PolyData", ",", "'vtkImageData'", ":", "vtki", ".", "UniformGrid", ",", "'vtkStructuredPoints'", ":", "vtki", ".", "UniformGrid", ",", "'vtkMultiBlockDataSet'", ":", "vtki", ".", "MultiBlock", ",", "}", "key", "=", "vtkdataset", ".", "GetClassName", "(", ")", "try", ":", "wrapped", "=", "wrappers", "[", "key", "]", "(", "vtkdataset", ")", "except", ":", "logging", ".", "warning", "(", "'VTK data type ({}) is not currently supported by vtki.'", ".", "format", "(", "key", ")", ")", "return", "vtkdataset", "# if not supported just passes the VTK data object", "return", "wrapped" ]