query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Make a puzzle variant from a gemini variant
def _format_variant(self, case_id, gemini_variant, individual_objs, index=0, add_all_info=False):
    """Make a puzzle Variant from a gemini variant row.

    Args:
        case_id: id of the case the variant belongs to (only used when
            genotypes are added).
        gemini_variant: a row from the gemini database (dict-like access).
        individual_objs: individuals of the case, used for genotype info.
        index (int): running index of the variant in the result set.
        add_all_info (bool): when True also add per-individual genotypes.

    Returns:
        Variant: the populated puzzle variant object.
    """
    chrom = gemini_variant['chrom']
    # Normalize chromosome names: strip a leading 'chr'/'CHR' prefix
    if chrom.startswith('chr') or chrom.startswith('CHR'):
        chrom = chrom[3:]
    variant_dict = {
        'CHROM': chrom,
        'POS': str(gemini_variant['start']),
        'ID': gemini_variant['rs_ids'],
        'REF': gemini_variant['ref'],
        'ALT': gemini_variant['alt'],
        'QUAL': gemini_variant['qual'],
        'FILTER': gemini_variant['filter']
    }
    variant = Variant(**variant_dict)
    # Use the gemini id for fast search
    variant.update_variant_id(gemini_variant['variant_id'])
    logger.debug("Creating a variant object of variant {0}".format(variant.variant_id))
    variant['index'] = index
    # Add the most severe consequence
    self._add_most_severe_consequence(variant, gemini_variant)
    # Add the impact severity
    self._add_impact_severity(variant, gemini_variant)
    ### POSITION ANNOTATIONS ###
    variant.start = int(gemini_variant['start'])
    variant.stop = int(gemini_variant['end'])
    # Add the sv specific coordinates
    if self.variant_type == 'sv':
        variant.sv_type = gemini_variant['sub_type']
        variant.stop = int(gemini_variant['end'])
        self._add_sv_coordinates(variant)
    else:
        ### Consequence and region annotations
        # Add the transcript information
        self._add_transcripts(variant, gemini_variant)
        # Frequency annotations (1000G, ExAC, GMAF)
        self._add_thousand_g(variant, gemini_variant)
        self._add_exac(variant, gemini_variant)
        self._add_gmaf(variant, gemini_variant)
        #### Check the impact annotations ####
        if gemini_variant['cadd_scaled']:
            variant.cadd_score = gemini_variant['cadd_scaled']
        # We use the prediction in text
        polyphen = gemini_variant['polyphen_pred']
        if polyphen:
            variant.add_severity('Polyphen', polyphen)
        # We use the prediction in text
        sift = gemini_variant['sift_pred']
        if sift:
            variant.add_severity('SIFT', sift)
    # Add the genes based on the hgnc symbols
    self._add_hgnc_symbols(variant)
    if self.variant_type == 'snv':
        self._add_genes(variant)
    self._add_consequences(variant)
    ### GENOTYPE ANNOTATIONS ###
    # Get the genotype info
    if add_all_info:
        self._add_genotypes(variant, gemini_variant, case_id, individual_objs)
    # NOTE(review): for svs genes are added only after the (optional)
    # genotype step — presumably because sv gene lookup needs the final
    # coordinates; confirm against _add_sv_coordinates.
    if self.variant_type == 'sv':
        self._add_genes(variant)
    return variant
5,100
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/gemini/mixins/variant.py#L237-L323
[ "def", "CanonicalPathToLocalPath", "(", "path", ")", ":", "# Account for raw devices", "path", "=", "path", ".", "replace", "(", "\"/\\\\\"", ",", "\"\\\\\"", ")", "path", "=", "path", ".", "replace", "(", "\"/\"", ",", "\"\\\\\"", ")", "m", "=", "re", ".", "match", "(", "r\"\\\\([a-zA-Z]):(.*)$\"", ",", "path", ")", "if", "m", ":", "path", "=", "\"%s:\\\\%s\"", "%", "(", "m", ".", "group", "(", "1", ")", ",", "m", ".", "group", "(", "2", ")", ".", "lstrip", "(", "\"\\\\\"", ")", ")", "return", "path" ]
Check if the variant is a variation in any of the individuals
def _is_variant ( self , gemini_variant , ind_objs ) : indexes = ( ind . ind_index for ind in ind_objs ) #Check if any individual have a heterozygous or homozygous variant call for index in indexes : gt_call = gemini_variant [ 'gt_types' ] [ index ] if ( gt_call == 1 or gt_call == 3 ) : return True return False
5,101
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/gemini/mixins/variant.py#L325-L343
[ "def", "get_power_status", "(", ")", "->", "SystemPowerStatus", ":", "get_system_power_status", "=", "ctypes", ".", "windll", ".", "kernel32", ".", "GetSystemPowerStatus", "get_system_power_status", ".", "argtypes", "=", "[", "ctypes", ".", "POINTER", "(", "SystemPowerStatus", ")", "]", "get_system_power_status", ".", "restype", "=", "wintypes", ".", "BOOL", "status", "=", "SystemPowerStatus", "(", ")", "if", "not", "get_system_power_status", "(", "ctypes", ".", "pointer", "(", "status", ")", ")", ":", "raise", "ctypes", ".", "WinError", "(", ")", "else", ":", "return", "status" ]
Boolean for telling if the sample is affected .
def is_affected(self):
    """Boolean for telling if the sample is affected.

    PED phenotype coding: '2' means affected; '1' means unaffected.
    Any other (unknown) value is treated as not affected, matching the
    previous explicit if/elif/else chain.
    """
    return self.phenotype == '2'
5,102
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/mixins.py#L17-L25
[ "def", "compose", "(", "list_of_files", ",", "destination_file", ",", "files_metadata", "=", "None", ",", "content_type", "=", "None", ",", "retry_params", "=", "None", ",", "_account_id", "=", "None", ")", ":", "api", "=", "storage_api", ".", "_get_storage_api", "(", "retry_params", "=", "retry_params", ",", "account_id", "=", "_account_id", ")", "if", "os", ".", "getenv", "(", "'SERVER_SOFTWARE'", ")", ".", "startswith", "(", "'Dev'", ")", ":", "def", "_temp_func", "(", "file_list", ",", "destination_file", ",", "content_type", ")", ":", "bucket", "=", "'/'", "+", "destination_file", ".", "split", "(", "'/'", ")", "[", "1", "]", "+", "'/'", "with", "open", "(", "destination_file", ",", "'w'", ",", "content_type", "=", "content_type", ")", "as", "gcs_merge", ":", "for", "source_file", "in", "file_list", ":", "with", "open", "(", "bucket", "+", "source_file", "[", "'Name'", "]", ",", "'r'", ")", "as", "gcs_source", ":", "gcs_merge", ".", "write", "(", "gcs_source", ".", "read", "(", ")", ")", "compose_object", "=", "_temp_func", "else", ":", "compose_object", "=", "api", ".", "compose_object", "file_list", ",", "_", "=", "_validate_compose_list", "(", "destination_file", ",", "list_of_files", ",", "files_metadata", ",", "32", ")", "compose_object", "(", "file_list", ",", "destination_file", ",", "content_type", ")" ]
Get a gene list from the database .
def gene_list(self, list_id):
    """Get a gene list from the database by its list id."""
    matching = self.query(GeneList).filter_by(list_id=list_id)
    return matching.first()
5,103
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/sql/mixins/actions/genelist.py#L6-L8
[ "def", "_init_request_logging", "(", "self", ",", "app", ")", ":", "enabled", "=", "not", "app", ".", "config", ".", "get", "(", "CONF_DISABLE_REQUEST_LOGGING", ",", "False", ")", "if", "not", "enabled", ":", "return", "self", ".", "_requests_middleware", "=", "WSGIApplication", "(", "self", ".", "_key", ",", "app", ".", "wsgi_app", ",", "telemetry_channel", "=", "self", ".", "_channel", ")", "app", ".", "wsgi_app", "=", "self", ".", "_requests_middleware" ]
Create a new gene list and optionally link to cases .
def add_genelist(self, list_id, gene_ids, case_obj=None):
    """Create a new gene list and optionally link it to a case.

    Returns the newly created (and persisted) GeneList record.
    """
    genelist = GeneList(list_id=list_id)
    genelist.gene_ids = gene_ids
    if case_obj:
        # link the freshly created list to the given case
        genelist.cases.append(case_obj)
    self.session.add(genelist)
    self.save()
    return genelist
5,104
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/sql/mixins/actions/genelist.py#L14-L23
[ "def", "getOverlayTransformTrackedDeviceRelative", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayTransformTrackedDeviceRelative", "punTrackedDevice", "=", "TrackedDeviceIndex_t", "(", ")", "pmatTrackedDeviceToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "punTrackedDevice", ")", ",", "byref", "(", "pmatTrackedDeviceToOverlayTransform", ")", ")", "return", "result", ",", "punTrackedDevice", ",", "pmatTrackedDeviceToOverlayTransform" ]
Remove a gene list and links to cases .
def remove_genelist(self, list_id, case_obj=None):
    """Remove a gene list and its links to cases.

    With a ``case_obj`` only the link between that single case and the
    list is removed; without one, every link is removed together with
    the gene list record itself.
    """
    genelist = self.gene_list(list_id)
    if case_obj:
        # remove a single link between case and gene list
        linked_case_ids = [case_obj.id]
    else:
        # remove all links and the list itself
        linked_case_ids = [case.id for case in genelist.cases]
        self.session.delete(genelist)
    links = self.query(CaseGenelistLink).filter(
        CaseGenelistLink.case_id.in_(linked_case_ids),
        CaseGenelistLink.genelist_id == genelist.id,
    )
    for link in links:
        self.session.delete(link)
    self.save()
5,105
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/sql/mixins/actions/genelist.py#L25-L44
[ "def", "getOverlayTransformTrackedDeviceRelative", "(", "self", ",", "ulOverlayHandle", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayTransformTrackedDeviceRelative", "punTrackedDevice", "=", "TrackedDeviceIndex_t", "(", ")", "pmatTrackedDeviceToOverlayTransform", "=", "HmdMatrix34_t", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "byref", "(", "punTrackedDevice", ")", ",", "byref", "(", "pmatTrackedDeviceToOverlayTransform", ")", ")", "return", "result", ",", "punTrackedDevice", ",", "pmatTrackedDeviceToOverlayTransform" ]
Get or create a new case specific gene list record .
def case_genelist(self, case_obj):
    """Get or create the case specific (HPO) gene list record."""
    hpo_list_id = "{}-HPO".format(case_obj.case_id)
    existing = self.gene_list(hpo_list_id)
    if existing is not None:
        return existing
    # no list yet for this case: create it and attach it to the case
    new_list = GeneList(list_id=hpo_list_id)
    case_obj.gene_lists.append(new_list)
    self.session.add(new_list)
    return new_list
5,106
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/sql/mixins/actions/genelist.py#L46-L56
[ "def", "_sync", "(", "self", ")", ":", "if", "(", "self", ".", "_opcount", ">", "self", ".", "checkpoint_operations", "or", "datetime", ".", "now", "(", ")", ">", "self", ".", "_last_sync", "+", "self", ".", "checkpoint_timeout", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Synchronizing queue metadata.\"", ")", "self", ".", "queue_metadata", ".", "sync", "(", ")", "self", ".", "_last_sync", "=", "datetime", ".", "now", "(", ")", "self", ".", "_opcount", "=", "0", "else", ":", "self", ".", "log", ".", "debug", "(", "\"NOT synchronizing queue metadata.\"", ")" ]
Sets the size of the figure by expanding the drawing space of the molecule.svg file. These dimensions have been previously determined. Also makes the lines of the molecule thicker.
def add_bigger_box(self):
    """Set the figure size by expanding the drawing area of molecule.svg.

    Rewrites the width/height attributes of the molecule drawing to a
    100% viewbox sized from previously determined figure dimensions
    (self.molecule.x_dim / y_dim), centers the molecule in the bigger
    box, and thickens the molecule lines so they read well at the
    larger plot size.  Finally splits the patched file into
    ``self.filestart`` (svg header, first 8 lines) and
    ``self.draw_molecule`` (the rest).
    """
    # Exact strings currently present in molecule.svg that will be replaced.
    # NOTE(review): replacement relies on byte-exact matching of these
    # literals inside change_lines_in_svg — do not reformat them.
    start1 = "width='" + str(int(self.molecule.molsize1)) + "px' height='" + str(int(self.molecule.molsize2)) + "px' >"
    start2 = "<rect style='opacity:1.0;fill:#FFFFFF;stroke:none' width='" + str(int(self.molecule.molsize1)) + "' height='" + str(int(self.molecule.molsize2)) + "' x='0' y='0'> </rect>"
    # Replacement header: full-size svg with a viewbox of the figure dimensions.
    bigger_box = "width='100%' height='100%' viewbox='0 0 " + str(int(self.molecule.x_dim)) + " " + str(int(self.molecule.y_dim)) + "' > "
    # Replacement background rect + a <g> that translates the molecule to the
    # center of the enlarged canvas.
    big_box2 = "<rect style='opacity:1.0;fill:white;stroke:none' width='" + str(int(self.molecule.x_dim)) + "px' height='" + str(int(self.molecule.y_dim)) + "px' x='0' y='0'> </rect> <g id='molecularDrawing' transform='translate(" + str((self.molecule.x_dim - self.molecule.molsize1) / 2) + "," + str((self.molecule.y_dim - self.molecule.molsize2) / 2) + ")'>'<rect style='opacity:1.0;fill:#ffffff;stroke:none' width='" + str(self.molecule.molsize1) + "' height='" + str(self.molecule.molsize2) + "' x='0' y='0' /> "
    self.end_symbol = "</svg>"
    # The closing </svg> becomes </g> so more elements can follow the molecule.
    no_end_symbol = "</g>"
    # Make the lines in molecule drawing thicker to look better with the large plots
    linewidth1 = "stroke-width:2px"
    linewidth2 = "stroke-width:5px"
    self.change_lines_in_svg("molecule.svg", linewidth1, linewidth2)
    self.change_lines_in_svg("molecule.svg", start1, bigger_box)
    self.change_lines_in_svg("molecule.svg", start2, big_box2)
    self.change_lines_in_svg("molecule.svg", self.end_symbol, no_end_symbol)
    with open("molecule.svg", "r") as f:
        lines = f.readlines()
        # first 8 lines: svg header; remainder: the molecule drawing itself
        self.filestart = " ".join(map(str, lines[0:8]))
        self.draw_molecule = "".join(map(str, lines[8:]))
    # redundant: the with-statement already closed the file
    f.close()
5,107
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/figure.py#L57-L80
[ "def", "render_unregistered", "(", "error", "=", "None", ")", ":", "return", "template", "(", "read_index_template", "(", ")", ",", "registered", "=", "False", ",", "error", "=", "error", ",", "seeder_data", "=", "None", ",", "url_id", "=", "None", ",", ")" ]
Extends with class or function
def extend_with(func):
    """Register a class or function as an ArgParseInator plugin.

    First registration wins: an already known name is left untouched.
    """
    plugin_name = func.__name__
    if plugin_name not in ArgParseInator._plugins:
        ArgParseInator._plugins[plugin_name] = func
5,108
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L599-L602
[ "def", "repair", "(", "self", ",", "volume_id_or_uri", ",", "timeout", "=", "-", "1", ")", ":", "data", "=", "{", "\"type\"", ":", "\"ExtraManagedStorageVolumePaths\"", ",", "\"resourceUri\"", ":", "self", ".", "_client", ".", "build_uri", "(", "volume_id_or_uri", ")", "}", "custom_headers", "=", "{", "'Accept-Language'", ":", "'en_US'", "}", "uri", "=", "self", ".", "URI", "+", "'/repair'", "return", "self", ".", "_client", ".", "create", "(", "data", ",", "uri", "=", "uri", ",", "timeout", "=", "timeout", ",", "custom_headers", "=", "custom_headers", ")" ]
Decorates a function or a class method to add it to the argument parser
def arg(*args, **kwargs):
    """Decorate a function or a class method to add it to the argument parser.

    ``*args``/``**kwargs`` are passed through to argparse's
    ``add_argument``; the special keyword ``cmd_name`` overrides the
    command name (defaults to the function name).
    """
    def decorate(func):
        """Decorate"""
        # we'll set the command name with the passed cmd_name argument, if
        # exist, else the command name will be the function name
        func.__cmd_name__ = kwargs.pop(
            'cmd_name', getattr(func, '__cmd_name__', func.__name__))
        # retrieve the class (SillyClass)
        func.__cls__ = utils.check_class()
        if not hasattr(func, '__arguments__'):
            # if the funcion hasn't the __arguments__ yet, we'll setup them
            # using get_functarguments.
            func.__arguments__ = utils.get_functarguments(func)
        if len(args) or len(kwargs):
            # if we have some argument or keyword argument
            # we'll try to get the destination name from the kwargs ('dest')
            # else we'll use the last arg name as destination
            arg_name = kwargs.get(
                'dest', args[-1].lstrip('-').replace('-', '_'))
            try:
                # we try to get the command index.
                idx = func.__named__.index(arg_name)
                # and delete it from the named list
                del func.__named__[idx]
                # and delete it from the arguments list
                del func.__arguments__[idx]
            except ValueError:
                pass
            # append the args and kwargs to the function arguments list
            func.__arguments__.append((args, kwargs,))
        if func.__cls__ is None and isinstance(func, types.FunctionType):
            # if the function don't have a class and is a FunctionType
            # we'll add it directly to he commands list.
            ap_ = ArgParseInator(skip_init=True)
            if func.__cmd_name__ not in ap_.commands:
                # we'll add it if not exists
                ap_.commands[func.__cmd_name__] = func
        return func
    return decorate
5,109
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L605-L648
[ "def", "log_summary", "(", "self", ")", ":", "participants", "=", "Participant", ".", "query", ".", "with_entities", "(", "Participant", ".", "status", ")", ".", "all", "(", ")", "counts", "=", "Counter", "(", "[", "p", ".", "status", "for", "p", "in", "participants", "]", ")", "sorted_counts", "=", "sorted", "(", "counts", ".", "items", "(", ")", ",", "key", "=", "itemgetter", "(", "0", ")", ")", "self", ".", "log", "(", "\"Status summary: {}\"", ".", "format", "(", "str", "(", "sorted_counts", ")", ")", ")", "return", "sorted_counts" ]
Decorates a class to handle the arguments parser .
def class_args(cls):
    """Decorate a class to handle the arguments parser.

    Collects every method of *cls* that was marked with ``@arg``
    (i.e. carries ``__cmd_name__``) and registers them — either as
    subcommands of the class command, or directly as top level commands.
    """
    # get the Singleton
    ap_ = ArgParseInator(skip_init=True)
    # collect special vars (really need?)
    utils.collect_appendvars(ap_, cls)
    # set class reference
    cls.__cls__ = cls
    cmds = {}
    # get eventual class arguments
    cls.__arguments__ = getattr(cls, '__arguments__', [])
    # cycle through class functions
    for func in [f for f in cls.__dict__.values()
                 if hasattr(f, '__cmd_name__') and not inspect.isclass(f)]:
        # clear subcommands
        func.__subcommands__ = None
        # set the parent class
        func.__cls__ = cls
        # assign to commands dict
        cmds[func.__cmd_name__] = func
    if hasattr(cls, '__cmd_name__') and cls.__cmd_name__ not in ap_.commands:
        # if che class has the __cmd_name__ attribute and is not already present
        # in the ArgParseInator commands
        # set the class subcommands
        cls.__subcommands__ = cmds
        # add the class as ArgParseInator command
        ap_.commands[cls.__cmd_name__] = cls
    else:
        # else if we don't have a __cmd_name__
        # we will add all the functions directly to the ArgParseInator commands
        # if it don't already exists.
        for name, func in cmds.items():
            if name not in ap_.commands:
                ap_.commands[name] = func
    return cls
5,110
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L651-L687
[ "def", "_openResources", "(", "self", ")", ":", "try", ":", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "True", ")", "except", "Exception", "as", "ex", ":", "logger", ".", "warning", "(", "ex", ")", "logger", ".", "warning", "(", "\"Unable to read wav with memmory mapping. Trying without now.\"", ")", "rate", ",", "data", "=", "scipy", ".", "io", ".", "wavfile", ".", "read", "(", "self", ".", "_fileName", ",", "mmap", "=", "False", ")", "self", ".", "_array", "=", "data", "self", ".", "attributes", "[", "'rate'", "]", "=", "rate" ]
Set authorization for a command or subcommand.
def cmd_auth(auth_phrase=None):
    """Set authorization for a command or subcommand.

    Without an ``auth_phrase`` the command requires the global
    authorization phrase; with one, that specific phrase is required.
    """
    def decorate(func):
        """Record the auth requirement and return *func* unchanged."""
        # get the Singleton
        registry = ArgParseInator(skip_init=True)
        # True means "global auth phrase required"; otherwise store the
        # command specific phrase (as a string) keyed by the function id.
        registry.auths[id(func)] = (
            True if auth_phrase is None else str(auth_phrase))
        return func
    return decorate
5,111
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L697-L718
[ "def", "create_api_call", "(", "func", ",", "settings", ")", ":", "def", "base_caller", "(", "api_call", ",", "_", ",", "*", "args", ")", ":", "\"\"\"Simply call api_call and ignore settings.\"\"\"", "return", "api_call", "(", "*", "args", ")", "def", "inner", "(", "request", ",", "options", "=", "None", ")", ":", "\"\"\"Invoke with the actual settings.\"\"\"", "this_options", "=", "_merge_options_metadata", "(", "options", ",", "settings", ")", "this_settings", "=", "settings", ".", "merge", "(", "this_options", ")", "if", "this_settings", ".", "retry", "and", "this_settings", ".", "retry", ".", "retry_codes", ":", "api_call", "=", "gax", ".", "retry", ".", "retryable", "(", "func", ",", "this_settings", ".", "retry", ",", "*", "*", "this_settings", ".", "kwargs", ")", "else", ":", "api_call", "=", "gax", ".", "retry", ".", "add_timeout_arg", "(", "func", ",", "this_settings", ".", "timeout", ",", "*", "*", "this_settings", ".", "kwargs", ")", "api_call", "=", "_catch_errors", "(", "api_call", ",", "gax", ".", "config", ".", "API_ERRORS", ")", "return", "api_caller", "(", "api_call", ",", "this_settings", ",", "request", ")", "if", "settings", ".", "page_descriptor", ":", "if", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "raise", "ValueError", "(", "'The API call has incompatible settings: '", "'bundling and page streaming'", ")", "api_caller", "=", "_page_streamable", "(", "settings", ".", "page_descriptor", ")", "elif", "settings", ".", "bundler", "and", "settings", ".", "bundle_descriptor", ":", "api_caller", "=", "_bundleable", "(", "settings", ".", "bundle_descriptor", ")", "else", ":", "api_caller", "=", "base_caller", "return", "inner" ]
Parse our arguments .
def parse_args(self):
    """Parse our arguments.

    Compiles the parser, optionally injects the default command into
    ``sys.argv``, parses, then sets up the output file (raw or codecs
    based on the requested encoding) and the config file.  Returns
    ``self`` for chaining.
    """
    # compile the parser
    self._compile()
    # clear the args
    self.args = None
    self._self_event('before_parse', 'parse', *sys.argv[1:], **{})
    # list commands/subcommands in argv
    cmds = [cmd for cmd in sys.argv[1:] if not cmd.startswith("-")]
    if (len(cmds) > 0 and not utils.check_help() and self.default_cmd and
            cmds[0] not in self.commands):
        # if we have at least one command which is not an help command
        # and we have a default command and the first command in arguments
        # is not in commands we insert the default command as second
        # argument (actually the first command)
        sys.argv.insert(1, self.default_cmd)
    # let's parse the arguments
    self.args = self.parser.parse_args()
    # set up the output.
    if self.args:
        # if we have some arguments
        if self.add_output and self.args.output is not None:
            # If add_output is True and we have an output file
            # setup the encoding
            self.encoding = self.args.encoding
            if self.args.encoding.lower() == 'raw':
                # if we have passed a raw encoding we will write directly
                # to the output file.
                self._output = open(self.args.output, self.args.write_mode)
            else:
                # else we will use the codecs module to write to the
                # output file.
                import codecs
                self._output = codecs.open(
                    self.args.output, self.args.write_mode,
                    encoding=self.args.encoding)
        if self._cfg_factory:
            # if we have a config factory setup the config file with the
            # right param
            self.cfg_file = self.args.config
    # now is parsed.
    self._is_parsed = True
    return self
5,112
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L318-L362
[ "def", "dump", "(", "self", ")", ":", "assert", "self", ".", "database", "is", "not", "None", "cmd", "=", "\"SELECT count from {} WHERE rowid={}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "STATE_INFO_ROW", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "self", ".", "_from_sqlite", "(", "ret", "[", "0", "]", "[", "0", "]", ")", "+", "self", ".", "inserts", "if", "count", ">", "self", ".", "row_limit", ":", "msg", "=", "\"cleaning up state, this might take a while.\"", "logger", ".", "warning", "(", "msg", ")", "delete", "=", "count", "-", "self", ".", "row_limit", "delete", "+=", "int", "(", "self", ".", "row_limit", "*", "(", "self", ".", "row_cleanup_quota", "/", "100.0", ")", ")", "cmd", "=", "(", "\"DELETE FROM {} WHERE timestamp IN (\"", "\"SELECT timestamp FROM {} ORDER BY timestamp ASC LIMIT {});\"", ")", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ",", "self", ".", "STATE_TABLE", ",", "delete", ")", ")", "self", ".", "_vacuum", "(", ")", "cmd", "=", "\"SELECT COUNT(*) FROM {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_TABLE", ")", ")", "ret", "=", "self", ".", "_fetchall", "(", ")", "assert", "len", "(", "ret", ")", "==", "1", "assert", "len", "(", "ret", "[", "0", "]", ")", "==", "1", "count", "=", "ret", "[", "0", "]", "[", "0", "]", "cmd", "=", "\"UPDATE {} SET count = {} WHERE rowid = {}\"", "self", ".", "_execute", "(", "cmd", ".", "format", "(", "self", ".", "STATE_INFO_TABLE", ",", "self", ".", "_to_sqlite", "(", "count", ")", ",", "self", ".", "STATE_INFO_ROW", ",", ")", ")", "self", ".", "_update_cache_directory_state", "(", ")", "self", ".", "database", ".", "commit", "(", ")", "self", ".", "cursor", ".", "close", "(", ")", "self", ".", "database", ".", "close", "(", ")", 
"self", ".", "database", "=", "None", "self", ".", "cursor", "=", "None", "self", ".", "inserts", "=", "0" ]
Check the authorization for the command
def check_auth(self, name):
    """Check the authorization for the command *name*.

    Returns True when the command is authorized.  Raises
    ``ArgParseInatorAuthorizationRequired`` when no auth phrase was
    passed, or ``ArgParseInatorNotValidAuthorization`` when it is wrong.
    """
    if name not in self.auths:
        # command does not require any authorization
        return True
    required = self.auths[name]
    supplied = self.args.auth
    if supplied is None:
        # no authorization phrase was passed on the command line
        raise exceptions.ArgParseInatorAuthorizationRequired
    # True means "use the global phrase"; anything else is the
    # command specific phrase to match.
    expected = self.auth_phrase if required is True else required
    if supplied != expected:
        raise exceptions.ArgParseInatorNotValidAuthorization
    return True
5,113
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L364-L380
[ "def", "assertStructIsInline", "(", "self", ",", "obj", ")", ":", "N", ".", "enforce_number", "(", "obj", ",", "N", ".", "UOffsetTFlags", ")", "if", "obj", "!=", "self", ".", "Offset", "(", ")", ":", "msg", "=", "(", "\"flatbuffers: Tried to write a Struct at an Offset that \"", "\"is different from the current Offset of the Builder.\"", ")", "raise", "StructIsNotInlineError", "(", "msg", ")" ]
Check if was passed a valid action in the command line and if so executes it by passing parameters and returning the result .
def check_command(self, **new_attributes):
    """Check if a valid action was passed on the command line and, if so,
    execute it by passing parameters and returning the result.

    ``new_attributes`` are forwarded to ``_execute``.  Returns the
    command result, or 0 when the authorization check fails.
    """
    # let's parse arguments if we didn't before.
    if not self._is_parsed:
        self.parse_args()
    if not self.commands:
        # if we don't have commands raise an Exception
        raise exceptions.ArgParseInatorNoCommandsFound
    elif self._single:
        # if we have a single function we get it directly
        func = self._single
    else:
        if not self.args.command:
            self.parser.error("too few arguments")
        # get the right command
        func = self.commands[self.args.command]
    if hasattr(func, '__subcommands__') and func.__subcommands__:
        # if we have subcommands get the command from them
        command = func.__subcommands__[self.args.subcommand]
    else:
        # else the command IS the function
        command = func
    # get the command name
    self.cmd_name = command.__cmd_name__
    # check authorization
    if not self.check_auth(id(command)):
        return 0
    # let's execute the command.
    return self._execute(func, command, **new_attributes)
5,114
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L382-L417
[ "def", "set_etag", "(", "self", ",", "etag", ",", "weak", "=", "False", ")", ":", "self", ".", "headers", "[", "\"ETag\"", "]", "=", "quote_etag", "(", "etag", ",", "weak", ")" ]
Try to call events for cmd .
def _call_event(self, event_name, cmd, pargs, kwargs, **kws):
    """Try to call the *event_name* handler for *cmd*.

    Looks for the handler first on the command itself, then on its
    ``__cls__``.  Returns a ``(result, pargs, kwargs)`` triple; when no
    handler exists the result is None and the params are unchanged.
    """
    def get_result_params(res):
        """return the right list of params"""
        if not isinstance(res, (list, tuple)):
            # plain result: keep the original params untouched
            return res, pargs, kwargs
        elif len(res) == 2:
            # NOTE(review): a 2-tuple result is returned whole with the
            # original params — only longer tuples rewrite pargs below.
            # Looks intentional but verify against the event protocol.
            return res, pargs, kwargs
        # result carries replacement positional args: keep pargs[0]
        # (the instance/first arg) and splice in the new ones
        return res[0], (pargs[0],) + tuple(res[1]), kwargs
    if hasattr(cmd, event_name):
        return get_result_params(
            getattr(cmd, event_name)(pargs[0], *pargs[1:], **kwargs))
    elif hasattr(cmd.__cls__, event_name):
        # class-level handler also receives the command name
        return get_result_params(
            getattr(cmd.__cls__, event_name)(
                pargs[0], cmd.__cmd_name__ or cmd.__name__,
                *pargs[1:], **kwargs))
    return None, pargs, kwargs
5,115
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L531-L550
[ "def", "replace_dataset", "(", "self", ",", "dataset_key", ",", "*", "*", "kwargs", ")", ":", "request", "=", "self", ".", "__build_dataset_obj", "(", "lambda", ":", "_swagger", ".", "DatasetPutRequest", "(", "title", "=", "kwargs", ".", "get", "(", "'title'", ")", ",", "visibility", "=", "kwargs", ".", "get", "(", "'visibility'", ")", ")", ",", "lambda", "name", ",", "url", ",", "expand_archive", ",", "description", ",", "labels", ":", "_swagger", ".", "FileCreateRequest", "(", "name", "=", "name", ",", "source", "=", "_swagger", ".", "FileSourceCreateRequest", "(", "url", "=", "url", ",", "expand_archive", "=", "expand_archive", ")", ",", "description", "=", "description", ",", "labels", "=", "labels", ")", ",", "kwargs", ")", "owner_id", ",", "dataset_id", "=", "parse_dataset_key", "(", "dataset_key", ")", "try", ":", "self", ".", "_datasets_api", ".", "replace_dataset", "(", "owner_id", ",", "dataset_id", ",", "request", ")", "except", "_swagger", ".", "rest", ".", "ApiException", "as", "e", ":", "raise", "RestApiError", "(", "cause", "=", "e", ")" ]
Call self event
def _self_event ( self , event_name , cmd , * pargs , * * kwargs ) : if hasattr ( self , event_name ) : getattr ( self , event_name ) ( cmd , * pargs , * * kwargs )
5,116
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L552-L555
[ "def", "read_pressure", "(", "self", ")", ":", "UT", "=", "self", ".", "read_raw_temp", "(", ")", "UP", "=", "self", ".", "read_raw_pressure", "(", ")", "# Datasheet values for debugging:", "#UT = 27898", "#UP = 23843", "# Calculations below are taken straight from section 3.5 of the datasheet.", "# Calculate true temperature coefficient B5.", "X1", "=", "(", "(", "UT", "-", "self", ".", "cal_AC6", ")", "*", "self", ".", "cal_AC5", ")", ">>", "15", "X2", "=", "(", "self", ".", "cal_MC", "<<", "11", ")", "//", "(", "X1", "+", "self", ".", "cal_MD", ")", "B5", "=", "X1", "+", "X2", "self", ".", "logger", ".", "debug", "(", "'B5 = {0}'", ",", "B5", ")", "# Pressure Calculations", "B6", "=", "B5", "-", "4000", "self", ".", "logger", ".", "debug", "(", "'B6 = {0}'", ",", "B6", ")", "X1", "=", "(", "self", ".", "cal_B2", "*", "(", "B6", "*", "B6", ")", ">>", "12", ")", ">>", "11", "X2", "=", "(", "self", ".", "cal_AC2", "*", "B6", ")", ">>", "11", "X3", "=", "X1", "+", "X2", "B3", "=", "(", "(", "(", "self", ".", "cal_AC1", "*", "4", "+", "X3", ")", "<<", "self", ".", "_mode", ")", "+", "2", ")", "//", "4", "self", ".", "logger", ".", "debug", "(", "'B3 = {0}'", ",", "B3", ")", "X1", "=", "(", "self", ".", "cal_AC3", "*", "B6", ")", ">>", "13", "X2", "=", "(", "self", ".", "cal_B1", "*", "(", "(", "B6", "*", "B6", ")", ">>", "12", ")", ")", ">>", "16", "X3", "=", "(", "(", "X1", "+", "X2", ")", "+", "2", ")", ">>", "2", "B4", "=", "(", "self", ".", "cal_AC4", "*", "(", "X3", "+", "32768", ")", ")", ">>", "15", "self", ".", "logger", ".", "debug", "(", "'B4 = {0}'", ",", "B4", ")", "B7", "=", "(", "UP", "-", "B3", ")", "*", "(", "50000", ">>", "self", ".", "_mode", ")", "self", ".", "logger", ".", "debug", "(", "'B7 = {0}'", ",", "B7", ")", "if", "B7", "<", "0x80000000", ":", "p", "=", "(", "B7", "*", "2", ")", "//", "B4", "else", ":", "p", "=", "(", "B7", "//", "B4", ")", "*", "2", "X1", "=", "(", "p", ">>", "8", ")", "*", "(", "p", ">>", "8", ")", "X1", "=", "(", 
"X1", "*", "3038", ")", ">>", "16", "X2", "=", "(", "-", "7357", "*", "p", ")", ">>", "16", "p", "=", "p", "+", "(", "(", "X1", "+", "X2", "+", "3791", ")", ">>", "4", ")", "self", ".", "logger", ".", "debug", "(", "'Pressure {0} Pa'", ",", "p", ")", "return", "p", "/", "100" ]
Writes to the output
def write(self, *string):
    """Write the given pieces to the output, space separated (fluent API)."""
    pieces = (six.text_type(piece) for piece in string)
    self._output.write(' '.join(pieces))
    return self
5,117
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L565-L570
[ "def", "get_placement_solver", "(", "service_instance", ")", ":", "stub", "=", "salt", ".", "utils", ".", "vmware", ".", "get_new_service_instance_stub", "(", "service_instance", ",", "ns", "=", "'pbm/2.0'", ",", "path", "=", "'/pbm/sdk'", ")", "pbm_si", "=", "pbm", ".", "ServiceInstance", "(", "'ServiceInstance'", ",", "stub", ")", "try", ":", "profile_manager", "=", "pbm_si", ".", "RetrieveContent", "(", ")", ".", "placementSolver", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "return", "profile_manager" ]
Terminate the script .
def exit(self, status=EXIT_OK, message=None):
    """Terminate the script via the underlying argparse parser.

    :param status: exit status code; ``EXIT_OK`` means success.
    :param message: optional message to print while exiting.

    When ``self.msg_on_error_only`` is true the message is only shown
    for non-successful statuses; otherwise it is always passed along.
    """
    if not self.parser:
        self.parser = argparse.ArgumentParser()
    # Suppress the message on a clean exit when configured to do so.
    if self.msg_on_error_only and status == EXIT_OK:
        message = None
    self.parser.exit(status, message)
5,118
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__init__.py#L580-L597
[ "def", "delete_user", "(", "self", ",", "user", ")", ":", "assert", "self", ".", "user", "==", "'catroot'", "or", "self", ".", "user", "==", "'postgres'", "assert", "not", "user", "==", "'public'", "con", "=", "self", ".", "connection", "or", "self", ".", "_connect", "(", ")", "cur", "=", "con", ".", "cursor", "(", ")", "cur", ".", "execute", "(", "'DROP SCHEMA {user} CASCADE;'", ".", "format", "(", "user", "=", "user", ")", ")", "cur", ".", "execute", "(", "'REVOKE USAGE ON SCHEMA public FROM {user};'", ".", "format", "(", "user", "=", "user", ")", ")", "cur", ".", "execute", "(", "'REVOKE SELECT ON ALL TABLES IN SCHEMA public FROM {user};'", ".", "format", "(", "user", "=", "user", ")", ")", "cur", ".", "execute", "(", "'DROP ROLE {user};'", ".", "format", "(", "user", "=", "user", ")", ")", "self", ".", "stdout", ".", "write", "(", "'REMOVED USER {user}\\n'", ".", "format", "(", "user", "=", "user", ")", ")", "if", "self", ".", "connection", "is", "None", ":", "con", ".", "commit", "(", ")", "con", ".", "close", "(", ")", "return", "self" ]
Analysis of ligand SASA .
def analyse_ligand_sasa(self):
    """Run Shrake-Rupley SASA analysis for the non-hydrogen ligand atoms.

    Iterates over the loaded trajectories, computes per-frame SASA with
    mdtraj and stores per-atom results in ``self.atom_sasa`` (keyed by
    trajectory index) and the averaged values in ``self.total_sasa``.
    """
    i = 0
    start = timer()
    # No trajectory given: fall back to analysing the topology file itself.
    if self.trajectory == []:
        self.trajectory = [self.topology_data.universe.filename]
    try:
        for traj in self.trajectory:
            new_traj = mdtraj.load(traj, top=self.topology_data.universe.filename)
            # Analyse only non-H ligand
            ligand_slice = new_traj.atom_slice(atom_indices=self.topology_data.universe.ligand_noH.ids)
            self.sasa = mdtraj.shrake_rupley(ligand_slice)
            self.atom_sasa[i] = self.assign_per_atom_sasa()
            i += 1
        self.total_sasa = self.get_total_per_atom_sasa()
    except KeyError as e:
        # NOTE(review): Python 2 print statements — this module predates py3.
        print "WARNING: SASA analysis cannot be performed due to incorrect atom names in"
        print "the topology " , e
    print "SASA: " + str(timer() - start)
5,119
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/analysis/sasa.py#L23-L43
[ "def", "restart", "(", "self", ",", "timeout", "=", "None", ")", ":", "msg", "=", "{", "\"value\"", ":", "\"Restart requested by \"", "+", "self", ".", "username", "+", "\"via the Splunk SDK for Python\"", "}", "# This message will be deleted once the server actually restarts.", "self", ".", "messages", ".", "create", "(", "name", "=", "\"restart_required\"", ",", "*", "*", "msg", ")", "result", "=", "self", ".", "post", "(", "\"server/control/restart\"", ")", "if", "timeout", "is", "None", ":", "return", "result", "start", "=", "datetime", ".", "now", "(", ")", "diff", "=", "timedelta", "(", "seconds", "=", "timeout", ")", "while", "datetime", ".", "now", "(", ")", "-", "start", "<", "diff", ":", "try", ":", "self", ".", "login", "(", ")", "if", "not", "self", ".", "restart_required", ":", "return", "result", "except", "Exception", "as", "e", ":", "sleep", "(", "1", ")", "raise", "Exception", "(", "\"Operation time out.\"", ")" ]
Make a dictionary with SASA assigned to each ligand atom stored as list of SASA values over the simulation time .
def assign_per_atom_sasa(self):
    """Map each ligand atom name to its list of per-frame SASA values.

    :return: dict of atom name -> list of SASA values, one entry per
        frame currently held in ``self.sasa``.
    """
    ligand = self.topology_data.universe.ligand_noH
    names = [a.name for a in ligand.atoms]
    return {
        names[idx]: [frame[idx] for frame in self.sasa]
        for idx in range(ligand.n_atoms)
    }
5,120
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/analysis/sasa.py#L46-L53
[ "def", "review", "(", "cls", ",", "content", ",", "log", ",", "parent", ",", "window_icon", ")", ":", "# pragma: no cover", "dlg", "=", "DlgReview", "(", "content", ",", "log", ",", "parent", ",", "window_icon", ")", "if", "dlg", ".", "exec_", "(", ")", ":", "return", "dlg", ".", "ui", ".", "edit_main", ".", "toPlainText", "(", ")", ",", "dlg", ".", "ui", ".", "edit_log", ".", "toPlainText", "(", ")", "return", "None", ",", "None" ]
Return average SASA of the atoms .
def get_total_per_atom_sasa(self):
    """Return the SASA of each atom averaged over frames and trajectories.

    For every trajectory the per-atom SASA values are first averaged over
    frames; those per-trajectory means are then averaged across all
    trajectories.

    :return: defaultdict of atom name -> mean SASA value.
    """
    n_traj = len(self.atom_sasa)
    averaged = defaultdict(int)
    # Accumulate the per-trajectory mean SASA for every atom.
    for traj_idx in range(n_traj):
        per_atom = self.atom_sasa[traj_idx]
        for name, values in per_atom.items():
            averaged[name] += float(sum(values)) / len(values)
    # Turn the sums into means over all trajectories.
    for name in averaged:
        averaged[name] = float(averaged[name]) / n_traj
    return averaged
5,121
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/analysis/sasa.py#L55-L63
[ "def", "write_result_stream", "(", "result_stream", ",", "filename_prefix", "=", "None", ",", "results_per_file", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "result_stream", ",", "types", ".", "GeneratorType", ")", ":", "stream", "=", "result_stream", "else", ":", "stream", "=", "result_stream", ".", "stream", "(", ")", "file_time_formatter", "=", "\"%Y-%m-%dT%H_%M_%S\"", "if", "filename_prefix", "is", "None", ":", "filename_prefix", "=", "\"twitter_search_results\"", "if", "results_per_file", ":", "logger", ".", "info", "(", "\"chunking result stream to files with {} tweets per file\"", ".", "format", "(", "results_per_file", ")", ")", "chunked_stream", "=", "partition", "(", "stream", ",", "results_per_file", ",", "pad_none", "=", "True", ")", "for", "chunk", "in", "chunked_stream", ":", "chunk", "=", "filter", "(", "lambda", "x", ":", "x", "is", "not", "None", ",", "chunk", ")", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}_{}.json\"", ".", "format", "(", "filename_prefix", ",", "curr_datetime", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "chunk", ")", "else", ":", "curr_datetime", "=", "(", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "strftime", "(", "file_time_formatter", ")", ")", "_filename", "=", "\"{}.json\"", ".", "format", "(", "filename_prefix", ")", "yield", "from", "write_ndjson", "(", "_filename", ",", "stream", ")" ]
You can choose whether to use lock method when running threads .
def run(self, *args):
    """Start consuming ``self.target(*args)`` on a background thread.

    Produced items are stored one by one in ``self._product`` and
    dispatched to the event handler registered for their descriptor.
    Returns ``self`` immediately (also when already running).
    """
    if self.running:
        # Already consuming; don't spawn a second worker thread.
        return self
    self._mut_finished(False)  # in case of recovery from a disaster.
    self._mut_running(True)
    stream = self.target(*args)

    # noinspection SpellCheckingInspection
    def subr():
        # Worker: drain the stream, firing the mapped event per item.
        self._mut_running(True)
        try:
            for each in stream:
                self._product = each
                desc = self.descriptor_mapping(each)
                event = self.events.get(desc)
                if event:
                    # NOTE(review): passes the builtin ``globals`` function
                    # object itself (not its result) — confirm intended.
                    event(self, each, globals)
            self._mut_finished(True)
        except ThreadExit:
            # Cooperative cancellation raised from an event handler.
            pass
        finally:
            self._mut_running(False)
    self._thread = thread = threading.Thread(target=subr, args=())
    thread.start()
    return self
5,122
https://github.com/thautwarm/Redy/blob/8beee5c5f752edfd2754bb1e6b5f4acb016a7770/Redy/Async/Accompany.py#L114-L144
[ "def", "roles_dict", "(", "path", ",", "repo_prefix", "=", "\"\"", ",", "repo_sub_dir", "=", "\"\"", ")", ":", "exit_if_path_not_found", "(", "path", ")", "aggregated_roles", "=", "{", "}", "roles", "=", "os", ".", "walk", "(", "path", ")", ".", "next", "(", ")", "[", "1", "]", "# First scan all directories", "for", "role", "in", "roles", ":", "for", "sub_role", "in", "roles_dict", "(", "path", "+", "\"/\"", "+", "role", ",", "repo_prefix", "=", "\"\"", ",", "repo_sub_dir", "=", "role", "+", "\"/\"", ")", ":", "aggregated_roles", "[", "role", "+", "\"/\"", "+", "sub_role", "]", "=", "role", "+", "\"/\"", "+", "sub_role", "# Then format them", "for", "role", "in", "roles", ":", "if", "is_role", "(", "os", ".", "path", ".", "join", "(", "path", ",", "role", ")", ")", ":", "if", "isinstance", "(", "role", ",", "basestring", ")", ":", "role_repo", "=", "\"{0}{1}\"", ".", "format", "(", "repo_prefix", ",", "role_name", "(", "role", ")", ")", "aggregated_roles", "[", "role", "]", "=", "role_repo", "return", "aggregated_roles" ]
Add the consequences found in all transcripts
def _add_consequences ( self , variant_obj ) : consequences = set ( ) for transcript in variant_obj . transcripts : for consequence in transcript . consequence . split ( '&' ) : consequences . add ( consequence ) variant_obj . consequences = list ( consequences )
5,123
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/gemini/mixins/variant_extras/consequences.py#L8-L20
[ "def", "_post_request", "(", "self", ",", "url", ",", "headers", ",", "data", "=", "None", ")", ":", "# Grab file from data.", "files", "=", "None", "for", "field", ",", "value", "in", "data", ":", "if", "field", "==", "'file'", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "files", "=", "value", "else", ":", "files", "=", "{", "'file'", ":", "value", "}", "break", "# Remove file entry from data.", "data", "[", ":", "]", "=", "[", "tup", "for", "tup", "in", "data", "if", "tup", "[", "0", "]", "!=", "'file'", "]", "return", "self", ".", "_session", ".", "post", "(", "url", ",", "headers", "=", "headers", ",", "data", "=", "data", ",", "files", "=", "files", ")" ]
Add hgnc symbols to the variant If there are transcripts use the symbols found here otherwise use phizz to get the gene ids .
def _add_hgnc_symbols ( self , variant_obj ) : hgnc_symbols = set ( ) if variant_obj . transcripts : for transcript in variant_obj . transcripts : if transcript . hgnc_symbol : hgnc_symbols . add ( transcript . hgnc_symbol ) else : chrom = variant_obj . CHROM start = variant_obj . start stop = variant_obj . stop hgnc_symbols = get_gene_symbols ( chrom , start , stop ) #Make unique ids variant_obj . gene_symbols = list ( hgnc_symbols )
5,124
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/vcf/mixins/variant_extras/genes.py#L7-L25
[ "def", "_integration", "(", "data", ",", "sample_rate", ")", ":", "wind_size", "=", "int", "(", "0.080", "*", "sample_rate", ")", "int_ecg", "=", "numpy", ".", "zeros_like", "(", "data", ")", "cum_sum", "=", "data", ".", "cumsum", "(", ")", "int_ecg", "[", "wind_size", ":", "]", "=", "(", "cum_sum", "[", "wind_size", ":", "]", "-", "cum_sum", "[", ":", "-", "wind_size", "]", ")", "/", "wind_size", "int_ecg", "[", ":", "wind_size", "]", "=", "cum_sum", "[", ":", "wind_size", "]", "/", "numpy", ".", "arange", "(", "1", ",", "wind_size", "+", "1", ")", "return", "int_ecg" ]
Add the Gene objects for a variant
def _add_genes(self, variant_obj):
    """Create Gene objects for the variant and attach them.

    Ensembl ids and HGNC symbols are harvested from the transcripts when
    available, otherwise the precomputed ``gene_symbols`` are used; each
    resulting gene is added via ``variant_obj.add_gene``.
    """
    ensembl_ids = []
    hgnc_symbols = []
    if variant_obj.transcripts:
        for transcript in variant_obj.transcripts:
            if transcript.ensembl_id:
                ensembl_ids.append(transcript.ensembl_id)
            if transcript.hgnc_symbol:
                hgnc_symbols.append(transcript.hgnc_symbol)
    else:
        # No transcript annotations: fall back to the stored symbols.
        hgnc_symbols = variant_obj.gene_symbols
    for gene in get_gene_info(ensembl_ids=ensembl_ids,
                              hgnc_symbols=hgnc_symbols):
        variant_obj.add_gene(gene)
5,125
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/vcf/mixins/variant_extras/genes.py#L27-L49
[ "def", "_get_tab", "(", "cls", ")", ":", "if", "not", "cls", ".", "_tabs", "[", "'dec_cobs'", "]", ":", "# Compute the COBS table for decoding", "cls", ".", "_tabs", "[", "'dec_cobs'", "]", "[", "'\\xff'", "]", "=", "(", "255", ",", "''", ")", "cls", ".", "_tabs", "[", "'dec_cobs'", "]", ".", "update", "(", "dict", "(", "(", "chr", "(", "l", ")", ",", "(", "l", ",", "'\\0'", ")", ")", "for", "l", "in", "range", "(", "1", ",", "255", ")", ")", ")", "# Compute the COBS table for encoding", "cls", ".", "_tabs", "[", "'enc_cobs'", "]", "=", "[", "(", "255", ",", "'\\xff'", ")", ",", "dict", "(", "(", "l", ",", "chr", "(", "l", ")", ")", "for", "l", "in", "range", "(", "1", ",", "255", ")", ")", ",", "]", "return", "cls", ".", "_tabs", "[", "'dec_cobs'", "]", ",", "cls", ".", "_tabs", "[", "'enc_cobs'", "]" ]
Strict deco for RedisPrepareCommit
def _redis_strict_pc ( func ) : phase = "session_%s" % func . __name__ @ functools . wraps ( func ) def wrapper ( self , session , * args , * * kwargs ) : try : func ( self , session , * args , * * kwargs ) self . logger . debug ( "%s -> %s" % ( session . meepo_unique_id , phase ) ) return True except Exception as e : if self . strict : raise if isinstance ( e , redis . ConnectionError ) : self . logger . warn ( "redis connection error in %s: %s" % ( phase , session . meepo_unique_id ) ) else : self . logger . exception ( e ) return False return wrapper
5,126
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/prepare_commit.py#L45-L68
[ "def", "chunks", "(", "f", ")", ":", "while", "1", ":", "try", ":", "length", "=", "struct", ".", "unpack", "(", "b\"!I\"", ",", "f", ".", "read", "(", "4", ")", ")", "[", "0", "]", "tag", "=", "f", ".", "read", "(", "4", ")", "data", "=", "f", ".", "read", "(", "length", ")", "crc", "=", "struct", ".", "unpack", "(", "b\"!I\"", ",", "f", ".", "read", "(", "4", ")", ")", "[", "0", "]", "except", "struct", ".", "error", ":", "return", "if", "zlib", ".", "crc32", "(", "tag", "+", "data", ")", "&", "0xFFFFFFFF", "!=", "crc", ":", "raise", "IOError", "(", "'Checksum fail'", ")", "yield", "tag", ",", "data" ]
Determine the session phase in prepare commit .
def phase(self, session):
    """Return which phase of prepare-commit the session is in.

    :return: ``"prepare"`` while the session id is still in the prepare
        set, ``"commit"`` once it has been removed.
    """
    prepare_set_key, _ = self._keygen(session)
    still_preparing = self.r.sismember(
        prepare_set_key, session.meepo_unique_id)
    return "prepare" if still_preparing else "commit"
5,127
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/prepare_commit.py#L113-L123
[ "def", "get_url_metadata", "(", "self", ")", ":", "# Implemented from template for osid.resource.ResourceForm.get_group_metadata_template", "metadata", "=", "dict", "(", "self", ".", "_mdata", "[", "'url'", "]", ")", "metadata", ".", "update", "(", "{", "'existing_string_values'", ":", "self", ".", "_my_map", "[", "'url'", "]", "}", ")", "return", "Metadata", "(", "*", "*", "metadata", ")" ]
Prepare phase for session .
def prepare(self, session, event):
    """Prepare phase for session.

    Records the session id in the prepare set and stores the pickled
    event payload (primary keys of the changed objects) in a redis
    hash, so the commit phase can publish or expire it later.
    """
    if not event:
        self.logger.warn("event empty!")
        return
    sp_key, sp_hkey = self._keygen(session)

    def _pk(obj):
        # Primary key of an ORM object; unwrapped when single-column.
        pk_values = tuple(getattr(obj, c.name)
                          for c in obj.__mapper__.primary_key)
        if len(pk_values) == 1:
            return pk_values[0]
        return pk_values

    def _get_dump_value(value):
        # ORM instances are reduced to their primary keys before dumping.
        if hasattr(value, '__mapper__'):
            return _pk(value)
        return value
    pickled_event = {
        k: pickle.dumps({_get_dump_value(obj) for obj in objs})
        for k, objs in event.items()}
    with self.r.pipeline(transaction=False) as p:
        p.sadd(sp_key, session.meepo_unique_id)
        p.hmset(sp_hkey, pickled_event)
        p.execute()
5,128
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/prepare_commit.py#L126-L154
[ "def", "external_metadata", "(", "self", ",", "datasource_type", "=", "None", ",", "datasource_id", "=", "None", ")", ":", "if", "datasource_type", "==", "'druid'", ":", "datasource", "=", "ConnectorRegistry", ".", "get_datasource", "(", "datasource_type", ",", "datasource_id", ",", "db", ".", "session", ")", "elif", "datasource_type", "==", "'table'", ":", "database", "=", "(", "db", ".", "session", ".", "query", "(", "Database", ")", ".", "filter_by", "(", "id", "=", "request", ".", "args", ".", "get", "(", "'db_id'", ")", ")", ".", "one", "(", ")", ")", "Table", "=", "ConnectorRegistry", ".", "sources", "[", "'table'", "]", "datasource", "=", "Table", "(", "database", "=", "database", ",", "table_name", "=", "request", ".", "args", ".", "get", "(", "'table_name'", ")", ",", "schema", "=", "request", ".", "args", ".", "get", "(", "'schema'", ")", "or", "None", ",", ")", "external_metadata", "=", "datasource", ".", "external_metadata", "(", ")", "return", "self", ".", "json_response", "(", "external_metadata", ")" ]
Commit phase for session .
def commit(self, session):
    """Commit phase for session.

    Removes the session id from the prepare set and lets the stored
    event hash expire after one hour.
    """
    prepare_key, hash_key = self._keygen(session)
    pipe = self.r.pipeline(transaction=False)
    with pipe as p:
        p.srem(prepare_key, session.meepo_unique_id)
        p.expire(hash_key, 60 * 60)
        p.execute()
5,129
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/prepare_commit.py#L157-L166
[ "def", "external_metadata", "(", "self", ",", "datasource_type", "=", "None", ",", "datasource_id", "=", "None", ")", ":", "if", "datasource_type", "==", "'druid'", ":", "datasource", "=", "ConnectorRegistry", ".", "get_datasource", "(", "datasource_type", ",", "datasource_id", ",", "db", ".", "session", ")", "elif", "datasource_type", "==", "'table'", ":", "database", "=", "(", "db", ".", "session", ".", "query", "(", "Database", ")", ".", "filter_by", "(", "id", "=", "request", ".", "args", ".", "get", "(", "'db_id'", ")", ")", ".", "one", "(", ")", ")", "Table", "=", "ConnectorRegistry", ".", "sources", "[", "'table'", "]", "datasource", "=", "Table", "(", "database", "=", "database", ",", "table_name", "=", "request", ".", "args", ".", "get", "(", "'table_name'", ")", ",", "schema", "=", "request", ".", "args", ".", "get", "(", "'schema'", ")", "or", "None", ",", ")", "external_metadata", "=", "datasource", ".", "external_metadata", "(", ")", "return", "self", ".", "json_response", "(", "external_metadata", ")" ]
Clear all sessions in the prepare phase .
def clear(self, ts=None):
    """Clear all sessions still in the prepare phase.

    :param ts: optional unix timestamp selecting the namespace bucket;
        defaults to the current time.
    :return: result of the redis DELETE call.
    """
    timestamp = ts or int(time.time())
    prepare_key = "%s:session_prepare" % self.namespace(timestamp)
    return self.r.delete(prepare_key)
5,130
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/prepare_commit.py#L190-L196
[ "def", "trace_max_buffer_capacity", "(", "self", ")", ":", "cmd", "=", "enums", ".", "JLinkTraceCommand", ".", "GET_MAX_CAPACITY", "data", "=", "ctypes", ".", "c_uint32", "(", "0", ")", "res", "=", "self", ".", "_dll", ".", "JLINKARM_TRACE_Control", "(", "cmd", ",", "ctypes", ".", "byref", "(", "data", ")", ")", "if", "(", "res", "==", "1", ")", ":", "raise", "errors", ".", "JLinkException", "(", "'Failed to get max trace buffer size.'", ")", "return", "data", ".", "value" ]
Show all cases in the database .
def cases(ctx, root):
    """Show all cases in the database.

    Resolves the puzzle root directory (CLI option, click context, or
    the default ``~/.puzzle``), opens the sqlite store found there and
    echoes every case. Aborts when the root is a file or when the
    database has not been initialized yet.
    """
    root = root or ctx.obj.get('root') or os.path.expanduser("~/.puzzle")
    if os.path.isfile(root):
        logger.error("'root' can't be a file")
        ctx.abort()
    logger.info("Root directory is: {}".format(root))
    db_path = os.path.join(root, 'puzzle_db.sqlite3')
    logger.info("db path is: {}".format(db_path))
    if not os.path.exists(db_path):
        # The database file is created by the 'puzzle init' command.
        logger.warn("database not initialized, run 'puzzle init'")
        ctx.abort()
    store = SqlStore(db_path)
    for case in store.cases():
        click.echo(case)
5,131
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/cli/cases.py#L16-L39
[ "def", "augment_audio_with_sox", "(", "path", ",", "sample_rate", ",", "tempo", ",", "gain", ")", ":", "with", "NamedTemporaryFile", "(", "suffix", "=", "\".wav\"", ")", "as", "augmented_file", ":", "augmented_filename", "=", "augmented_file", ".", "name", "sox_augment_params", "=", "[", "\"tempo\"", ",", "\"{:.3f}\"", ".", "format", "(", "tempo", ")", ",", "\"gain\"", ",", "\"{:.3f}\"", ".", "format", "(", "gain", ")", "]", "sox_params", "=", "\"sox \\\"{}\\\" -r {} -c 1 -b 16 {} {} >/dev/null 2>&1\"", ".", "format", "(", "path", ",", "sample_rate", ",", "augmented_filename", ",", "\" \"", ".", "join", "(", "sox_augment_params", ")", ")", "os", ".", "system", "(", "sox_params", ")", "y", "=", "load_audio", "(", "augmented_filename", ")", "return", "y" ]
Creates a standalone subproject or submodule script structure
def init(name, subnames, dest, skeleton, description, project_type, skip_core):
    """Create a standalone or subprojects/submodules script structure.

    Copies the chosen skeleton into ``dest/name``, renaming the template
    ``project.py``/``project`` entries to the project's own name, and —
    for non-standalone projects — instantiates one submodule per entry
    in ``subnames``. Returns ``(0, message)`` with the project path.
    """
    dest = dest or CUR_DIR
    skeleton = join(skeleton or SKEL_PATH, project_type)
    project = join(dest, name)
    script = join(project, name + '.py')
    core = join(project, name)
    if project_type == 'standalone':
        renames = [(join(project, 'project.py'), script),
                   (join(project, 'project'), core)]
        copy_skeleton(name, skeleton, project, renames=renames,
                      description=description, ignore=False)
    else:
        renames = [(join(project, 'project.py'), script),
                   (join(project, 'project'), core)]
        # Optionally skip the core package; the submodule template is
        # always excluded here and instantiated per subname below.
        exclude_dirs = ['submodule'] + (['project'] if skip_core else [])
        copy_skeleton(name, skeleton, project, renames=renames,
                      description=description, exclude_dirs=exclude_dirs,
                      ignore=True)
        for subname in subnames:
            renames = [(join(project, 'submodule'), join(project, subname))]
            copy_skeleton(subname, skeleton, project, renames=renames,
                          description=description, ignore=True,
                          exclude_dirs=['project'],
                          exclude_files=['project.py'])
    return 0, "\n{}\n".format(project)
5,132
https://github.com/ellethee/argparseinator/blob/05e9c00dfaa938b9c4ee2aadc6206f5e0918e24e/argparseinator/__main__.py#L115-L145
[ "def", "__getBio", "(", "self", ",", "web", ")", ":", "bio", "=", "web", ".", "find_all", "(", "\"div\"", ",", "{", "\"class\"", ":", "\"user-profile-bio\"", "}", ")", "if", "bio", ":", "try", ":", "bio", "=", "bio", "[", "0", "]", ".", "text", "if", "bio", "and", "GitHubUser", ".", "isASCII", "(", "bio", ")", ":", "bioText", "=", "bio", ".", "replace", "(", "\"\\n\"", ",", "\"\"", ")", "bioText", "=", "bioText", ".", "replace", "(", "\"\\t\"", ",", "\" \"", ")", ".", "replace", "(", "\"\\\"\"", ",", "\"\"", ")", "bioText", "=", "bioText", ".", "replace", "(", "\"\\'\"", ",", "\"\"", ")", ".", "replace", "(", "\"\\\\\"", ",", "\"\"", ")", "self", ".", "bio", "=", "bioText", "else", ":", "self", ".", "bio", "=", "\"\"", "except", "IndexError", "as", "error", ":", "print", "(", "\"There was an error with the user \"", "+", "self", ".", "name", ")", "print", "(", "error", ")", "except", "AttributeError", "as", "error", ":", "print", "(", "\"There was an error with the user \"", "+", "self", ".", "name", ")", "print", "(", "error", ")" ]
Assert the internal consistency of the instance s data structures . This method is for debugging only .
def _check(self):
    """Assert the internal consistency of the instance's data structures.

    This method is for debugging only.
    """
    # Every key must map to an ordered list of line indices, each of
    # which points back at a line carrying that same key and a value.
    for k, ix in six.iteritems(self._indices):
        assert k is not None, 'null key'
        assert ix, 'Key does not map to any indices'
        assert ix == sorted(ix), "Key's indices are not in order"
        for i in ix:
            assert i in self._lines, 'Key index does not map to line'
            assert self._lines[i].key is not None, 'Key maps to comment'
            assert self._lines[i].key == k, 'Key does not map to itself'
            assert self._lines[i].value is not None, 'Key has null value'
    # Lines must be stored in ascending index order; comment/blank lines
    # keep their raw source, key lines must round-trip through loads().
    prev = None
    for i, line in six.iteritems(self._lines):
        assert prev is None or prev < i, 'Line indices out of order'
        prev = i
        if line.key is None:
            assert line.value is None, 'Comment/blank has value'
            assert line.source is not None, 'Comment source not stored'
            assert loads(line.source) == {}, 'Comment source is not comment'
        else:
            assert line.value is not None, 'Key has null value'
            if line.source is not None:
                assert loads(line.source) == {line.key: line.value}, 'Key source does not deserialize to itself'
            assert line.key in self._indices, 'Key is missing from map'
            assert i in self._indices[line.key], 'Key does not map to itself'
5,133
https://github.com/jwodder/javaproperties/blob/8b48f040305217ebeb80c98c4354691bbb01429b/javaproperties/propfile.py#L64-L93
[ "def", "read_wait_cell", "(", "self", ")", ":", "table_state", "=", "self", ".", "bt_table", ".", "read_row", "(", "TABLE_STATE", ",", "filter_", "=", "bigtable_row_filters", ".", "ColumnRangeFilter", "(", "METADATA", ",", "WAIT_CELL", ",", "WAIT_CELL", ")", ")", "if", "table_state", "is", "None", ":", "utils", ".", "dbg", "(", "'No waiting for new games needed; '", "'wait_for_game_number column not in table_state'", ")", "return", "None", "value", "=", "table_state", ".", "cell_value", "(", "METADATA", ",", "WAIT_CELL", ")", "if", "not", "value", ":", "utils", ".", "dbg", "(", "'No waiting for new games needed; '", "'no value in wait_for_game_number cell '", "'in table_state'", ")", "return", "None", "return", "cbt_intvalue", "(", "value", ")" ]
Parse the contents of the ~io . IOBase . readline - supporting file - like object fp as a simple line - oriented . properties file and return a PropertiesFile instance .
def load(cls, fp):
    """Parse a simple line-oriented ``.properties`` stream.

    :param fp: a readable file-like object supporting ``readline``
    :return: a new instance populated with every parsed line, keeping
        comments and original source text for round-tripping
    """
    instance = cls()
    for index, (key, value, source) in enumerate(parse(fp)):
        if key is not None:
            # Remember every line index at which this key occurs.
            instance._indices.setdefault(key, []).append(index)
        instance._lines[index] = PropertyLine(key, value, source)
    return instance
5,134
https://github.com/jwodder/javaproperties/blob/8b48f040305217ebeb80c98c4354691bbb01429b/javaproperties/propfile.py#L170-L195
[ "def", "validate_contribution", "(", "the_con", ")", ":", "passing", "=", "True", "for", "dtype", "in", "list", "(", "the_con", ".", "tables", ".", "keys", "(", ")", ")", ":", "print", "(", "\"validating {}\"", ".", "format", "(", "dtype", ")", ")", "fail", "=", "validate_table", "(", "the_con", ",", "dtype", ")", "if", "fail", ":", "passing", "=", "False", "print", "(", "'--'", ")" ]
Parse the contents of the string s as a simple line - oriented . properties file and return a PropertiesFile instance .
def loads(cls, s):
    """Parse a simple line-oriented ``.properties`` string.

    Accepts either text or bytes and delegates to :meth:`load` with an
    appropriate in-memory stream.
    """
    if isinstance(s, six.binary_type):
        buffer_factory = six.BytesIO
    else:
        buffer_factory = six.StringIO
    return cls.load(buffer_factory(s))
5,135
https://github.com/jwodder/javaproperties/blob/8b48f040305217ebeb80c98c4354691bbb01429b/javaproperties/propfile.py#L198-L220
[ "def", "original_failure", "(", "self", ")", ":", "try", ":", "result", "=", "self", ".", "_result", "(", "0.0", ")", "except", "TimeoutError", ":", "return", "None", "if", "isinstance", "(", "result", ",", "Failure", ")", ":", "return", "result", "else", ":", "return", "None" ]
Write the mapping to a file in simple line - oriented . properties format .
def dump(self, fp, separator='='):
    """Write the mapping to ``fp`` in ``.properties`` format.

    Lines with preserved source text are written verbatim; lines that
    were added programmatically are serialized with ``separator``
    between key and value.
    """
    ### TODO: Support setting the timestamp
    for line in six.itervalues(self._lines):
        if line.source is not None:
            fp.write(line.source)
        else:
            print(join_key_value(line.key, line.value, separator), file=fp)
5,136
https://github.com/jwodder/javaproperties/blob/8b48f040305217ebeb80c98c4354691bbb01429b/javaproperties/propfile.py#L222-L256
[ "async", "def", "get_creds_by_id", "(", "self", ",", "proof_req_json", ":", "str", ",", "cred_ids", ":", "set", ")", "->", "str", ":", "LOGGER", ".", "debug", "(", "'HolderProver.get_creds_by_id >>> proof_req_json: %s, cred_ids: %s'", ",", "proof_req_json", ",", "cred_ids", ")", "creds_json", "=", "await", "anoncreds", ".", "prover_get_credentials_for_proof_req", "(", "self", ".", "wallet", ".", "handle", ",", "proof_req_json", ")", "# retain only creds of interest: find corresponding referents", "rv_json", "=", "prune_creds_json", "(", "json", ".", "loads", "(", "creds_json", ")", ",", "cred_ids", ")", "LOGGER", ".", "debug", "(", "'HolderProver.get_cred_by_referent <<< %s'", ",", "rv_json", ")", "return", "rv_json" ]
Convert the mapping to a text string in simple line - oriented . properties format .
def dumps(self, separator='='):
    """Serialize the mapping to a ``.properties`` text string.

    :param separator: string placed between keys and values for lines
        without preserved source text
    :return: the serialized text
    """
    buffer = six.StringIO()
    self.dump(buffer, separator=separator)
    return buffer.getvalue()
5,137
https://github.com/jwodder/javaproperties/blob/8b48f040305217ebeb80c98c4354691bbb01429b/javaproperties/propfile.py#L258-L287
[ "def", "validate_submission", "(", "self", ",", "filename", ")", ":", "self", ".", "_prepare_temp_dir", "(", ")", "# Convert filename to be absolute path, relative path might cause problems", "# with mounting directory in Docker", "filename", "=", "os", ".", "path", ".", "abspath", "(", "filename", ")", "# extract submission", "if", "not", "self", ".", "_extract_submission", "(", "filename", ")", ":", "return", "None", "# verify submission size", "if", "not", "self", ".", "_verify_submission_size", "(", ")", ":", "return", "None", "# Load metadata", "metadata", "=", "self", ".", "_load_and_verify_metadata", "(", ")", "if", "not", "metadata", ":", "return", "None", "submission_type", "=", "metadata", "[", "'type'", "]", "# verify docker container size", "if", "not", "self", ".", "_verify_docker_image_size", "(", "metadata", "[", "'container_gpu'", "]", ")", ":", "return", "None", "# Try to run submission on sample data", "self", ".", "_prepare_sample_data", "(", "submission_type", ")", "if", "not", "self", ".", "_run_submission", "(", "metadata", ")", ":", "logging", ".", "error", "(", "'Failure while running submission'", ")", "return", "None", "if", "not", "self", ".", "_verify_output", "(", "submission_type", ")", ":", "logging", ".", "warning", "(", "'Some of the outputs of your submission are invalid or '", "'missing. You submission still will be evaluation '", "'but you might get lower score.'", ")", "return", "metadata" ]
Create a copy of the mapping including formatting information
def copy(self):
    """Return a copy of the mapping that keeps formatting information.

    The per-key index lists are duplicated so that mutating the copy
    does not affect the original.
    """
    duplicate = type(self)()
    duplicate._indices = OrderedDict(
        (key, list(indices))
        for key, indices in six.iteritems(self._indices))
    duplicate._lines = self._lines.copy()
    return duplicate
5,138
https://github.com/jwodder/javaproperties/blob/8b48f040305217ebeb80c98c4354691bbb01429b/javaproperties/propfile.py#L289-L296
[ "def", "write_new_expr_id", "(", "self", ",", "search_group", ",", "search", ",", "lars_id", ",", "instruments", ",", "gps_start_time", ",", "gps_end_time", ",", "comments", "=", "None", ")", ":", "# check if id already exists", "check_id", "=", "self", ".", "get_expr_id", "(", "search_group", ",", "search", ",", "lars_id", ",", "instruments", ",", "gps_start_time", ",", "gps_end_time", ",", "comments", "=", "comments", ")", "if", "check_id", ":", "return", "check_id", "# experiment not found in table", "row", "=", "self", ".", "RowType", "(", ")", "row", ".", "experiment_id", "=", "self", ".", "get_next_id", "(", ")", "row", ".", "search_group", "=", "search_group", "row", ".", "search", "=", "search", "row", ".", "lars_id", "=", "lars_id", "row", ".", "instruments", "=", "ifos_from_instrument_set", "(", "instruments", ")", "row", ".", "gps_start_time", "=", "gps_start_time", "row", ".", "gps_end_time", "=", "gps_end_time", "row", ".", "comments", "=", "comments", "self", ".", "append", "(", "row", ")", "# return new ID", "return", "row", ".", "experiment_id" ]
Create and normalize a vector across the ring plane .
def prepare_normal_vectors(atomselection):
    """Create a normalized vector perpendicular to the ring plane.

    Uses three alternating ring atoms (indices 0, 2 and 4) to span the
    plane and returns the normalized cross product of the two spanning
    vectors. (``self`` comes from the enclosing scope.)
    """
    coords = atomselection.coordinates()
    anchor, second, third = coords[0], coords[2], coords[4]
    spanning_a = self.vector(anchor, second)
    spanning_b = self.vector(third, anchor)
    return self.normalize_vector(np.cross(spanning_a, spanning_b))
5,139
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/analysis/maths_functions.py#L3-L8
[ "def", "update_security_of_password", "(", "self", ",", "ID", ",", "data", ")", ":", "# http://teampasswordmanager.com/docs/api-passwords/#update_security_password", "log", ".", "info", "(", "'Update security of password %s with %s'", "%", "(", "ID", ",", "data", ")", ")", "self", ".", "put", "(", "'passwords/%s/security.json'", "%", "ID", ",", "data", ")" ]
Decorator to use on methods that are allowed to retry the request after reauthenticating the client .
def refresh_session_if_necessary(f):
    """Decorator for client methods that may retry after re-authenticating.

    If the wrapped call raises an exception carrying an HTTP-style
    ``code`` attribute of 401 or 403, the client's session is refreshed
    and the call is retried exactly once. Any other exception propagates
    unchanged.
    """
    @functools.wraps(f)
    def wrapped(self, *args, **kwargs):
        try:
            return f(self, *args, **kwargs)
        except Exception as ex:
            # Only auth failures (401/403) warrant a refresh-and-retry.
            if getattr(ex, 'code', None) not in (401, 403):
                raise
            self.refresh_session()
            return f(self, *args, **kwargs)
    return wrapped
5,140
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/helpers.py#L4-L27
[ "def", "set_category", "(", "self", ",", "category", ")", ":", "pcategory", "=", "self", ".", "find", "(", "\"general/category\"", ")", "pcategory", ".", "clear", "(", ")", "name", "=", "ElementTree", ".", "SubElement", "(", "pcategory", ",", "\"name\"", ")", "if", "isinstance", "(", "category", ",", "Category", ")", ":", "id_", "=", "ElementTree", ".", "SubElement", "(", "pcategory", ",", "\"id\"", ")", "id_", ".", "text", "=", "category", ".", "id", "name", ".", "text", "=", "category", ".", "name", "elif", "isinstance", "(", "category", ",", "basestring", ")", ":", "name", ".", "text", "=", "category" ]
Build the sqlite database
def init_db(db_path):
    """Build the sqlite database at *db_path* from the bundled schema."""
    logger.info("Creating database")
    with closing(connect_database(db_path)) as db:
        with open(SCHEMA, 'r') as schema_file:
            db.cursor().executescript(schema_file.read())
        db.commit()
5,141
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/utils/puzzle_database.py#L15-L22
[ "def", "start_transmit", "(", "self", ",", "blocking", "=", "False", ",", "start_packet_groups", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "start_packet_groups", ":", "port_list_for_packet_groups", "=", "self", ".", "ports", ".", "values", "(", ")", "port_list_for_packet_groups", "=", "self", ".", "set_ports_list", "(", "*", "port_list_for_packet_groups", ")", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartTransmit {}'", ".", "format", "(", "port_list", ")", ")", "time", ".", "sleep", "(", "0.2", ")", "if", "blocking", ":", "self", ".", "wait_transmit", "(", "*", "ports", ")" ]
Populates result members .
def merge(self):
    """Populate result members.

    Runs the recursive merge of root/head/update and stores the outcome
    on ``merged_root``. Raises ``MergeError`` if any conflicts were
    collected during the merge.
    """
    merged = self._recursive_merge(self.root, self.head, self.update)
    self.merged_root = merged
    if self.conflicts:
        raise MergeError('Conflicts Occurred in Merge Process', self.conflicts)
5,142
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/merger.py#L135-L224
[ "def", "pressure", "(", "self", ")", ":", "self", ".", "_read_temperature", "(", ")", "# Algorithm from the BME280 driver", "# https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c", "adc", "=", "self", ".", "_read24", "(", "_BME280_REGISTER_PRESSUREDATA", ")", "/", "16", "# lowest 4 bits get dropped", "var1", "=", "float", "(", "self", ".", "_t_fine", ")", "/", "2.0", "-", "64000.0", "var2", "=", "var1", "*", "var1", "*", "self", ".", "_pressure_calib", "[", "5", "]", "/", "32768.0", "var2", "=", "var2", "+", "var1", "*", "self", ".", "_pressure_calib", "[", "4", "]", "*", "2.0", "var2", "=", "var2", "/", "4.0", "+", "self", ".", "_pressure_calib", "[", "3", "]", "*", "65536.0", "var3", "=", "self", ".", "_pressure_calib", "[", "2", "]", "*", "var1", "*", "var1", "/", "524288.0", "var1", "=", "(", "var3", "+", "self", ".", "_pressure_calib", "[", "1", "]", "*", "var1", ")", "/", "524288.0", "var1", "=", "(", "1.0", "+", "var1", "/", "32768.0", ")", "*", "self", ".", "_pressure_calib", "[", "0", "]", "if", "var1", "==", "0", ":", "return", "0", "if", "var1", ":", "pressure", "=", "1048576.0", "-", "adc", "pressure", "=", "(", "(", "pressure", "-", "var2", "/", "4096.0", ")", "*", "6250.0", ")", "/", "var1", "var1", "=", "self", ".", "_pressure_calib", "[", "8", "]", "*", "pressure", "*", "pressure", "/", "2147483648.0", "var2", "=", "pressure", "*", "self", ".", "_pressure_calib", "[", "7", "]", "/", "32768.0", "pressure", "=", "pressure", "+", "(", "var1", "+", "var2", "+", "self", ".", "_pressure_calib", "[", "6", "]", ")", "/", "16.0", "pressure", "/=", "100", "if", "pressure", "<", "_BME280_PRESSURE_MIN_HPA", ":", "return", "_BME280_PRESSURE_MIN_HPA", "if", "pressure", ">", "_BME280_PRESSURE_MAX_HPA", ":", "return", "_BME280_PRESSURE_MAX_HPA", "return", "pressure", "else", ":", "return", "_BME280_PRESSURE_MIN_HPA" ]
Return list of HGNC symbols matching HPO phenotype ids .
def hpo_genes(phenotype_ids, username, password):
    """Return phenomizer results matching HPO phenotype ids.

    Queries the phenomizer service and keeps only results that carry a
    p-value. Returns None when no phenotype ids are given or the query
    fails.
    """
    if phenotype_ids:
        try:
            results = query_phenomizer.query(username, password, phenotype_ids)
            return [result for result in results
                    if result['p_value'] is not None]
        # BUG FIX: the original `except SystemExit , RuntimeError :` is
        # Python 2 syntax for "catch SystemExit as RuntimeError" — it
        # shadowed the builtin and let RuntimeError escape. A tuple
        # catches both as intended.
        except (SystemExit, RuntimeError):
            pass
    return None
5,143
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/utils/phenomizer.py#L5-L34
[ "def", "to_td", "(", "frame", ",", "name", ",", "con", ",", "if_exists", "=", "'fail'", ",", "time_col", "=", "None", ",", "time_index", "=", "None", ",", "index", "=", "True", ",", "index_label", "=", "None", ",", "chunksize", "=", "10000", ",", "date_format", "=", "None", ")", ":", "database", ",", "table", "=", "name", ".", "split", "(", "'.'", ")", "uploader", "=", "StreamingUploader", "(", "con", ".", "client", ",", "database", ",", "table", ",", "show_progress", "=", "True", ",", "clear_progress", "=", "True", ")", "uploader", ".", "message", "(", "'Streaming import into: {0}.{1}'", ".", "format", "(", "database", ",", "table", ")", ")", "# check existence", "if", "if_exists", "==", "'fail'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "RuntimeError", "(", "'table \"%s\" already exists'", "%", "name", ")", "elif", "if_exists", "==", "'replace'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "pass", "else", ":", "uploader", ".", "message", "(", "'deleting old table...'", ")", "con", ".", "client", ".", "delete_table", "(", "database", ",", "table", ")", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "elif", "if_exists", "==", "'append'", ":", "try", ":", "con", ".", "client", ".", "table", "(", "database", ",", "table", ")", "except", "tdclient", ".", "api", ".", "NotFoundError", ":", "uploader", ".", "message", "(", "'creating new table...'", ")", "con", ".", "client", ".", "create_log_table", "(", "database", ",", "table", ")", "else", ":", "raise", "ValueError", "(", "'invalid value 
for if_exists: %s'", "%", "if_exists", ")", "# \"time_index\" implies \"index=False\"", "if", "time_index", ":", "index", "=", "None", "# convert", "frame", "=", "frame", ".", "copy", "(", ")", "frame", "=", "_convert_time_column", "(", "frame", ",", "time_col", ",", "time_index", ")", "frame", "=", "_convert_index_column", "(", "frame", ",", "index", ",", "index_label", ")", "frame", "=", "_convert_date_format", "(", "frame", ",", "date_format", ")", "# upload", "uploader", ".", "upload_frame", "(", "frame", ",", "chunksize", ")", "uploader", ".", "wait_for_import", "(", "len", "(", "frame", ")", ")" ]
Utility to monkeypatch form fields into Paper inputs (untested).
def mangle_form(form):
    """Monkeypatch a Django form's text/password widgets to Paper inputs.

    Swaps plain TextInput/PasswordInput widgets for their Paper
    equivalents and blanks the field labels. Mutates *form* in place and
    returns it.
    """
    # BUG FIX: form.fields maps name -> Field, so the widget lives on the
    # field object; the original compared the Field itself against widget
    # classes (never true) and, in the password branch, assigned onto the
    # key string instead of the field.
    for name, field in form.fields.iteritems():
        if type(field.widget) is forms.widgets.TextInput:
            field.widget = PaperTextInput()
            field.label = ''
        if type(field.widget) is forms.widgets.PasswordInput:
            field.widget = PaperPasswordInput()
            field.label = ''
    return form
5,144
https://github.com/Colorless-Green-Ideas/MaterialDjango/blob/e7a69e968965d25198d90318623a828cff67f5dc/materialdjango/forms.py#L14-L23
[ "def", "get_event_source", "(", "prefix", ")", ":", "source", "=", "LRR_EVENT_TYPE", ".", "UNKNOWN", "if", "prefix", "==", "'CID'", ":", "source", "=", "LRR_EVENT_TYPE", ".", "CID", "elif", "prefix", "==", "'DSC'", ":", "source", "=", "LRR_EVENT_TYPE", ".", "DSC", "elif", "prefix", "==", "'AD2'", ":", "source", "=", "LRR_EVENT_TYPE", ".", "ALARMDECODER", "elif", "prefix", "==", "'ADEMCO'", ":", "source", "=", "LRR_EVENT_TYPE", ".", "ADEMCO", "return", "source" ]
Generate redis key for event at timestamp .
def _keygen(self, event, ts=None):
    """Generate the redis key for *event* at timestamp *ts* (default: now)."""
    stamp = ts or time.time()
    return "%s:%s" % (self.namespace(stamp), event)
5,145
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/event_store.py#L140-L146
[ "def", "hide", "(", "self", ",", "selections", ")", ":", "if", "'atoms'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'atoms'", "]", "=", "selections", "[", "'atoms'", "]", "self", ".", "on_atom_hidden_changed", "(", ")", "if", "'bonds'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'bonds'", "]", "=", "selections", "[", "'bonds'", "]", "self", ".", "on_bond_hidden_changed", "(", ")", "if", "'box'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'box'", "]", "=", "box_s", "=", "selections", "[", "'box'", "]", "if", "box_s", ".", "mask", "[", "0", "]", ":", "if", "self", ".", "viewer", ".", "has_renderer", "(", "self", ".", "box_renderer", ")", ":", "self", ".", "viewer", ".", "remove_renderer", "(", "self", ".", "box_renderer", ")", "else", ":", "if", "not", "self", ".", "viewer", ".", "has_renderer", "(", "self", ".", "box_renderer", ")", ":", "self", ".", "viewer", ".", "add_renderer", "(", "self", ".", "box_renderer", ")", "return", "self", ".", "hidden_state" ]
Redis lua func to add an event to the corresponding sorted set .
def _zadd(self, key, pk, ts=None, ttl=None):
    """Run the LUA_ZADD script to add *pk* to the sorted set at *key*.

    The score is *ts*, defaulting to the store's current time.
    """
    score = ts or self._time()
    return self.r.eval(self.LUA_ZADD, 1, key, score, pk)
5,146
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/event_store.py#L154-L163
[ "def", "inspect_workers", "(", "self", ")", ":", "workers", "=", "tuple", "(", "self", ".", "workers", ".", "values", "(", ")", ")", "expired", "=", "tuple", "(", "w", "for", "w", "in", "workers", "if", "not", "w", ".", "is_alive", "(", ")", ")", "for", "worker", "in", "expired", ":", "self", ".", "workers", ".", "pop", "(", "worker", ".", "pid", ")", "return", "(", "(", "w", ".", "pid", ",", "w", ".", "exitcode", ")", "for", "w", "in", "expired", "if", "w", ".", "exitcode", "!=", "0", ")" ]
Add an event to event store .
def add ( self , event , pk , ts = None , ttl = None ) : key = self . _keygen ( event , ts ) try : self . _zadd ( key , pk , ts , ttl ) return True except redis . ConnectionError as e : # connection error typically happens when redis server can't be # reached or timed out, the error will be silent with an error # log and return None. self . logger . error ( "redis event store failed with connection error %r" % e ) return False
5,147
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/event_store.py#L165-L188
[ "def", "render_template", "(", "template", ")", ":", "def", "outer_wrapper", "(", "callable_or_dict", "=", "None", ",", "statuscode", "=", "None", ",", "*", "*", "kwargs", ")", ":", "def", "wrapper", "(", "request", ",", "*", "args", ",", "*", "*", "wrapper_kwargs", ")", ":", "if", "callable", "(", "callable_or_dict", ")", ":", "params", "=", "callable_or_dict", "(", "request", ",", "*", "args", ",", "*", "*", "wrapper_kwargs", ")", "else", ":", "params", "=", "callable_or_dict", "# If we want to return some other response type we can,", "# that simply overrides the default behavior", "if", "params", "is", "None", "or", "isinstance", "(", "params", ",", "dict", ")", ":", "resp", "=", "render", "(", "request", ",", "template", ",", "params", ",", "*", "*", "kwargs", ")", "else", ":", "resp", "=", "params", "if", "statuscode", ":", "resp", ".", "status_code", "=", "statuscode", "return", "resp", "return", "wrapper", "return", "outer_wrapper" ]
Replay events based on timestamp .
def replay(self, event, ts=0, end_ts=None, with_ts=False):
    """Replay events of *event* with scores between *ts* and *end_ts*.

    Returns decoded pks, or (pk, timestamp) pairs when *with_ts* is set.
    An unset *end_ts* means "+inf" (no upper bound).
    """
    key = self._keygen(event, ts)
    upper = end_ts if end_ts else "+inf"
    members = self.r.zrangebyscore(key, ts, upper, withscores=with_ts)
    if with_ts:
        return [(s(member), int(score)) for member, score in members]
    return [s(member) for member in members]
5,148
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/event_store.py#L190-L210
[ "def", "forbidden", "(", "request", ")", ":", "template", "=", "pkg_resources", ".", "resource_string", "(", "'pyramid_persona'", ",", "'templates/forbidden.html'", ")", ".", "decode", "(", ")", "html", "=", "template", "%", "{", "'js'", ":", "request", ".", "persona_js", ",", "'button'", ":", "request", ".", "persona_button", "}", "return", "Response", "(", "html", ",", "status", "=", "'403 Forbidden'", ")" ]
Query the last update timestamp of an event pk .
def query(self, event, pk, ts=None):
    """Query the last update timestamp of an event pk.

    Returns the timestamp as int, or None when the pk is absent.
    (A falsy score also maps to None, matching the original behavior.)
    """
    score = self.r.zscore(self._keygen(event, ts), pk)
    return int(score) if score else None
5,149
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/event_store.py#L212-L225
[ "def", "hide", "(", "self", ",", "selections", ")", ":", "if", "'atoms'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'atoms'", "]", "=", "selections", "[", "'atoms'", "]", "self", ".", "on_atom_hidden_changed", "(", ")", "if", "'bonds'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'bonds'", "]", "=", "selections", "[", "'bonds'", "]", "self", ".", "on_bond_hidden_changed", "(", ")", "if", "'box'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'box'", "]", "=", "box_s", "=", "selections", "[", "'box'", "]", "if", "box_s", ".", "mask", "[", "0", "]", ":", "if", "self", ".", "viewer", ".", "has_renderer", "(", "self", ".", "box_renderer", ")", ":", "self", ".", "viewer", ".", "remove_renderer", "(", "self", ".", "box_renderer", ")", "else", ":", "if", "not", "self", ".", "viewer", ".", "has_renderer", "(", "self", ".", "box_renderer", ")", ":", "self", ".", "viewer", ".", "add_renderer", "(", "self", ".", "box_renderer", ")", "return", "self", ".", "hidden_state" ]
Clear all stored record of event .
def clear(self, event, ts=None):
    """Delete every stored record of *event* in the *ts* key bucket."""
    key = self._keygen(event, ts)
    return self.r.delete(key)
5,150
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/event_store.py#L227-L233
[ "def", "generate_http_manifest", "(", "self", ")", ":", "base_path", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "translate_path", "(", "self", ".", "path", ")", ")", "self", ".", "dataset", "=", "dtoolcore", ".", "DataSet", ".", "from_uri", "(", "base_path", ")", "admin_metadata_fpath", "=", "os", ".", "path", ".", "join", "(", "base_path", ",", "\".dtool\"", ",", "\"dtool\"", ")", "with", "open", "(", "admin_metadata_fpath", ")", "as", "fh", ":", "admin_metadata", "=", "json", ".", "load", "(", "fh", ")", "http_manifest", "=", "{", "\"admin_metadata\"", ":", "admin_metadata", ",", "\"manifest_url\"", ":", "self", ".", "generate_url", "(", "\".dtool/manifest.json\"", ")", ",", "\"readme_url\"", ":", "self", ".", "generate_url", "(", "\"README.yml\"", ")", ",", "\"overlays\"", ":", "self", ".", "generate_overlay_urls", "(", ")", ",", "\"item_urls\"", ":", "self", ".", "generate_item_urls", "(", ")", "}", "return", "bytes", "(", "json", ".", "dumps", "(", "http_manifest", ")", ",", "\"utf-8\"", ")" ]
Register a file path from which to read parameter values .
def add_configuration_file(self, file_name):
    """Register a file path from which to read parameter values.

    Newly registered files are watched for modification (when inotify is
    enabled) so values can be re-read. Unreadable files are logged and
    reported via a ResourceWarning instead of raising.
    """
    logger.info('adding %s to configuration files', file_name)
    is_new = file_name not in self.configuration_files
    if is_new and self._inotify:
        self._watch_manager.add_watch(file_name, pyinotify.IN_MODIFY)
    if not os.access(file_name, os.R_OK):
        logger.warn('could not read %s', file_name)
        warnings.warn('could not read {}'.format(file_name), ResourceWarning)
        return
    parser = SafeConfigParser()
    parser.read(file_name)
    self.configuration_files[file_name] = parser
5,151
https://github.com/alunduil/crumbs/blob/94b23f45db3054000d16968a44400780c6cff5ba/crumbs/__init__.py#L328-L354
[ "def", "delete_unit", "(", "unit_id", ",", "*", "*", "kwargs", ")", ":", "try", ":", "db_unit", "=", "db", ".", "DBSession", ".", "query", "(", "Unit", ")", ".", "filter", "(", "Unit", ".", "id", "==", "unit_id", ")", ".", "one", "(", ")", "db", ".", "DBSession", ".", "delete", "(", "db_unit", ")", "db", ".", "DBSession", ".", "flush", "(", ")", "return", "True", "except", "NoResultFound", ":", "raise", "ResourceNotFoundError", "(", "\"Unit (ID=%s) does not exist\"", "%", "(", "unit_id", ")", ")" ]
Add the parameter to Parameters .
def add_parameter(self, **kwargs):
    """Add the parameter to Parameters.

    Registers an argparse-style parameter: records its metadata and
    default, and (unless excluded via ``only``) adds it to the
    appropriate argument group parser. Recognized extra kwargs:
    ``group``, ``environment_prefix``, ``only``; the rest are passed to
    ``add_argument``.
    """
    # Derive the canonical name from the longest option string,
    # e.g. ('-v', '--verbose') -> 'verbose'; 'dest' overrides.
    parameter_name = max(kwargs['options'], key=len).lstrip('-')
    if 'dest' in kwargs:
        parameter_name = kwargs['dest']
    group = kwargs.pop('group', 'default')
    self.groups.add(group)
    # Fully qualified name: '<group>.<name>' with dashes normalized.
    parameter_name = '.'.join([group, parameter_name]).lstrip('.').replace('-', '_')
    logger.info('adding parameter %s', parameter_name)
    if self.parsed:
        # Adding after parse() means this parameter was not consumed.
        logger.warn('adding parameter %s after parse', parameter_name)
        warnings.warn('adding parameter {} after parse'.format(parameter_name), RuntimeWarning)
    self.parameters[parameter_name] = copy.copy(kwargs)
    self.parameters[parameter_name]['group'] = group
    self.parameters[parameter_name]['type'] = kwargs.get('type', str)
    # Environment prefix defaults to the running script's basename.
    self.parameters[parameter_name]['environment_prefix'] = kwargs.pop('environment_prefix', os.path.basename(sys.argv[0]))
    if self.parameters[parameter_name]['environment_prefix'] is not None:
        # Normalize to ENV_VAR style: UPPER_SNAKE_CASE.
        self.parameters[parameter_name]['environment_prefix'] = self.parameters[parameter_name]['environment_prefix'].upper().replace('-', '_')
    logger.info('group: %s', group)
    # Index by group with the group prefix stripped from the name.
    self.grouped_parameters.setdefault(group, {}).setdefault(parameter_name.replace(group + '.', ''), self.parameters[parameter_name])
    # Default value depends on the argparse action semantics.
    action_defaults = {
        'store': kwargs.get('default'),
        'store_const': kwargs.get('const'),
        'store_true': False,
        'store_false': True,
        'append': [],
        'append_const': [],
        'count': 0,
    }
    self.defaults[parameter_name] = action_defaults[kwargs.get('action', 'store')]
    logger.info('default value: %s', kwargs.get('default'))
    if 'argument' in kwargs.pop('only', ['argument']):
        # Lazily create a named argument group on the default parser.
        if group not in self._group_parsers:
            self._group_parsers[group] = self._group_parsers['default'].add_argument_group(group)
        if self._group_prefix and group != 'default':
            # Prefix the long option with the group name,
            # e.g. --option -> --my-group-option.
            long_option = max(kwargs['options'], key=len)
            kwargs['options'].remove(long_option)
            kwargs['options'].append(long_option.replace('--', '--' + group.replace('_', '-') + '-'))
        logger.debug('options: %s', kwargs['options'])
        self._group_parsers[group].add_argument(*kwargs.pop('options'), **kwargs)
5,152
https://github.com/alunduil/crumbs/blob/94b23f45db3054000d16968a44400780c6cff5ba/crumbs/__init__.py#L356-L464
[ "def", "_extract_conjuction_elements_from_expression", "(", "expression", ")", ":", "if", "isinstance", "(", "expression", ",", "BinaryComposition", ")", "and", "expression", ".", "operator", "==", "u'&&'", ":", "for", "element", "in", "_extract_conjuction_elements_from_expression", "(", "expression", ".", "left", ")", ":", "yield", "element", "for", "element", "in", "_extract_conjuction_elements_from_expression", "(", "expression", ".", "right", ")", ":", "yield", "element", "else", ":", "yield", "expression" ]
Ensure all sources are ready to be queried .
def parse(self, only_known=False):
    """Ensure all sources are ready to be queried.

    A full parse marks the instance as parsed; ``only_known=True``
    leaves the parsed flag untouched and skips help flags so it can run
    before every parameter has been registered.
    """
    if not only_known:
        self.parsed = True
    logger.info('parsing parameters')
    logger.debug('sys.argv: %s', sys.argv)
    default_parser = self._group_parsers['default']
    if only_known:
        # Drop -h/--help so parse_known_args does not exit early.
        argv = [arg for arg in sys.argv if not re.match('-h|--help', arg)]
        default_parser.parse_known_args(args=argv,
                                        namespace=self._argument_namespace)
    else:
        default_parser.parse_args(namespace=self._argument_namespace)
5,153
https://github.com/alunduil/crumbs/blob/94b23f45db3054000d16968a44400780c6cff5ba/crumbs/__init__.py#L466-L501
[ "def", "reassign_comment_to_book", "(", "self", ",", "comment_id", ",", "from_book_id", ",", "to_book_id", ")", ":", "# Implemented from template for", "# osid.resource.ResourceBinAssignmentSession.reassign_resource_to_bin", "self", ".", "assign_comment_to_book", "(", "comment_id", ",", "to_book_id", ")", "try", ":", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "from_book_id", ")", "except", ":", "# something went wrong, roll back assignment to to_book_id", "self", ".", "unassign_comment_from_book", "(", "comment_id", ",", "to_book_id", ")", "raise" ]
Explicitly read the configuration files .
def read_configuration_files(self):
    """Explicitly (re-)read every registered configuration file.

    Files that are no longer readable are logged and reported via a
    ResourceWarning rather than raising.
    """
    for file_name, parser in self.configuration_files.items():
        if not os.access(file_name, os.R_OK):
            logger.warn('could not read %s', file_name)
            warnings.warn('could not read {}'.format(file_name), ResourceWarning)
            continue
        parser.read(file_name)
5,154
https://github.com/alunduil/crumbs/blob/94b23f45db3054000d16968a44400780c6cff5ba/crumbs/__init__.py#L503-L520
[ "def", "psychrometric_vapor_pressure_wet", "(", "dry_bulb_temperature", ",", "wet_bulb_temperature", ",", "pressure", ",", "psychrometer_coefficient", "=", "6.21e-4", "/", "units", ".", "kelvin", ")", ":", "return", "(", "saturation_vapor_pressure", "(", "wet_bulb_temperature", ")", "-", "psychrometer_coefficient", "*", "pressure", "*", "(", "dry_bulb_temperature", "-", "wet_bulb_temperature", ")", ".", "to", "(", "'kelvin'", ")", ")" ]
Return the number of genes
def nr_genes(self):
    """Return the number of genes annotated on the variant.

    Counts the gene objects when present, otherwise falls back to the
    raw gene symbols.
    """
    genes = self['genes']
    if genes:
        return len(genes)
    return len(self['gene_symbols'])
5,155
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L44-L50
[ "def", "_create_download_failed_message", "(", "exception", ",", "url", ")", ":", "message", "=", "'Failed to download from:\\n{}\\nwith {}:\\n{}'", ".", "format", "(", "url", ",", "exception", ".", "__class__", ".", "__name__", ",", "exception", ")", "if", "_is_temporal_problem", "(", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "requests", ".", "ConnectionError", ")", ":", "message", "+=", "'\\nPlease check your internet connection and try again.'", "else", ":", "message", "+=", "'\\nThere might be a problem in connection or the server failed to process '", "'your request. Please try again.'", "elif", "isinstance", "(", "exception", ",", "requests", ".", "HTTPError", ")", ":", "try", ":", "server_message", "=", "''", "for", "elem", "in", "decode_data", "(", "exception", ".", "response", ".", "content", ",", "MimeType", ".", "XML", ")", ":", "if", "'ServiceException'", "in", "elem", ".", "tag", "or", "'Message'", "in", "elem", ".", "tag", ":", "server_message", "+=", "elem", ".", "text", ".", "strip", "(", "'\\n\\t '", ")", "except", "ElementTree", ".", "ParseError", ":", "server_message", "=", "exception", ".", "response", ".", "text", "message", "+=", "'\\nServer response: \"{}\"'", ".", "format", "(", "server_message", ")", "return", "message" ]
Readable name for the variant .
def display_name(self):
    """Return a human readable name for the variant.

    SNVs are labelled by their first two gene symbols; structural
    variants by cytoband and length.
    """
    if not self.is_snv:
        return "{this.cytoband_start} ({this.sv_len})".format(this=self)
    return ', '.join(self.gene_symbols[:2])
5,156
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L58-L64
[ "def", "density_hub", "(", "self", ",", "weather_df", ")", ":", "if", "self", ".", "density_model", "!=", "'interpolation_extrapolation'", ":", "temperature_hub", "=", "self", ".", "temperature_hub", "(", "weather_df", ")", "# Calculation of density in kg/m³ at hub height", "if", "self", ".", "density_model", "==", "'barometric'", ":", "logging", ".", "debug", "(", "'Calculating density using barometric height '", "'equation.'", ")", "closest_height", "=", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "min", "(", "range", "(", "len", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", ")", ")", ",", "key", "=", "lambda", "i", ":", "abs", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "i", "]", "-", "self", ".", "power_plant", ".", "hub_height", ")", ")", "]", "density_hub", "=", "density", ".", "barometric", "(", "weather_df", "[", "'pressure'", "]", "[", "closest_height", "]", ",", "closest_height", ",", "self", ".", "power_plant", ".", "hub_height", ",", "temperature_hub", ")", "elif", "self", ".", "density_model", "==", "'ideal_gas'", ":", "logging", ".", "debug", "(", "'Calculating density using ideal gas equation.'", ")", "closest_height", "=", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "min", "(", "range", "(", "len", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", ")", ")", ",", "key", "=", "lambda", "i", ":", "abs", "(", "weather_df", "[", "'pressure'", "]", ".", "columns", "[", "i", "]", "-", "self", ".", "power_plant", ".", "hub_height", ")", ")", "]", "density_hub", "=", "density", ".", "ideal_gas", "(", "weather_df", "[", "'pressure'", "]", "[", "closest_height", "]", ",", "closest_height", ",", "self", ".", "power_plant", ".", "hub_height", ",", "temperature_hub", ")", "elif", "self", ".", "density_model", "==", "'interpolation_extrapolation'", ":", "logging", ".", "debug", "(", "'Calculating density using linear inter- or '", "'extrapolation.'", ")", "density_hub", "=", "tools", ".", 
"linear_interpolation_extrapolation", "(", "weather_df", "[", "'density'", "]", ",", "self", ".", "power_plant", ".", "hub_height", ")", "else", ":", "raise", "ValueError", "(", "\"'{0}' is an invalid value. \"", ".", "format", "(", "self", ".", "density_model", ")", "+", "\"`density_model` \"", "+", "\"must be 'barometric', 'ideal_gas' or \"", "+", "\"'interpolation_extrapolation'.\"", ")", "return", "density_hub" ]
Return a md5 key string based on position ref and alt
def md5(self):
    """Return an md5 hex key based on chromosome, position, ref and alt."""
    key = '_'.join([self.CHROM, str(self.POS), self.REF, self.ALT])
    # BUG FIX: hashlib requires bytes under Python 3; encoding ascii
    # coordinate strings as UTF-8 is a no-op on Python 2.
    return hashlib.md5(key.encode('utf-8')).hexdigest()
5,157
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L67-L70
[ "def", "blocking_request", "(", "self", ",", "msg", ",", "timeout", "=", "None", ",", "use_mid", "=", "None", ")", ":", "assert", "(", "get_thread_ident", "(", ")", "!=", "self", ".", "ioloop_thread_id", ")", ",", "(", "'Cannot call blocking_request() in ioloop'", ")", "if", "timeout", "is", "None", ":", "timeout", "=", "self", ".", "_request_timeout", "f", "=", "Future", "(", ")", "# for thread safety", "tf", "=", "[", "None", "]", "# Placeholder for tornado Future for exception tracebacks", "def", "blocking_request_callback", "(", ")", ":", "try", ":", "tf", "[", "0", "]", "=", "frf", "=", "self", ".", "future_request", "(", "msg", ",", "timeout", "=", "timeout", ",", "use_mid", "=", "use_mid", ")", "except", "Exception", ":", "tf", "[", "0", "]", "=", "frf", "=", "tornado_Future", "(", ")", "frf", ".", "set_exc_info", "(", "sys", ".", "exc_info", "(", ")", ")", "gen", ".", "chain_future", "(", "frf", ",", "f", ")", "self", ".", "ioloop", ".", "add_callback", "(", "blocking_request_callback", ")", "# We wait on the future result that should be set by the reply", "# handler callback. If this does not occur within the", "# timeout it means something unexpected went wrong. We give it", "# an extra second to deal with (unlikely?) slowness in the", "# rest of the code.", "extra_wait", "=", "1", "wait_timeout", "=", "timeout", "if", "wait_timeout", "is", "not", "None", ":", "wait_timeout", "=", "wait_timeout", "+", "extra_wait", "try", ":", "return", "f", ".", "result", "(", "timeout", "=", "wait_timeout", ")", "except", "TimeoutError", ":", "raise", "RuntimeError", "(", "'Unexpected error: Async request handler did '", "'not call reply handler within timeout period'", ")", "except", "Exception", ":", "# Use the tornado future to give us a usable traceback", "tf", "[", "0", "]", ".", "result", "(", ")", "assert", "False" ]
Add a frequency that will be displayed on the variant level
def add_frequency(self, name, value):
    """Add a frequency that will be displayed on the variant level."""
    entry = {'label': name, 'value': value}
    logger.debug("Adding frequency {0} with value {1} to variant {2}".format(
        name, value, self['variant_id']))
    self['frequencies'].append(entry)
5,158
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L81-L89
[ "def", "user_deleted_from_site_event", "(", "event", ")", ":", "userid", "=", "event", ".", "principal", "catalog", "=", "api", ".", "portal", ".", "get_tool", "(", "'portal_catalog'", ")", "query", "=", "{", "'object_provides'", ":", "WORKSPACE_INTERFACE", "}", "query", "[", "'workspace_members'", "]", "=", "userid", "workspaces", "=", "[", "IWorkspace", "(", "b", ".", "_unrestrictedGetObject", "(", ")", ")", "for", "b", "in", "catalog", ".", "unrestrictedSearchResults", "(", "query", ")", "]", "for", "workspace", "in", "workspaces", ":", "workspace", ".", "remove_from_team", "(", "userid", ")" ]
Set the max frequency for the variant
def set_max_freq(self, max_freq=None):
    """Set the max frequency for the variant.

    Uses *max_freq* directly when given; otherwise takes the highest
    value among the collected frequencies (a falsy stored value counts
    as "not set yet").
    """
    if max_freq:
        self['max_freq'] = max_freq
        return
    for frequency in self['frequencies']:
        current = self['max_freq']
        if not current or frequency['value'] > current:
            self['max_freq'] = frequency['value']
5,159
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L91-L109
[ "def", "_parse_path", "(", "self", ",", "path", ")", ":", "handle", ",", "path", "=", "self", ".", "_split_path", "(", "path", ")", "if", "self", ".", "_machine", "is", "not", "None", ":", "handle", "=", "self", ".", "_connect_hive", "(", "handle", ")", "return", "handle", ",", "path" ]
Add a severity to the variant
def add_severity(self, name, value):
    """Add a severity annotation (e.g. SIFT, Polyphen) to the variant."""
    entry = {name: value}
    logger.debug("Adding severity {0} with value {1} to variant {2}".format(
        name, value, self['variant_id']))
    self['severities'].append(entry)
5,160
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L111-L120
[ "def", "_get_session", "(", "server", ")", ":", "if", "server", "in", "_sessions", ":", "return", "_sessions", "[", "server", "]", "config", "=", "_get_spacewalk_configuration", "(", "server", ")", "if", "not", "config", ":", "raise", "Exception", "(", "'No config for \\'{0}\\' found on master'", ".", "format", "(", "server", ")", ")", "session", "=", "_get_client_and_key", "(", "config", "[", "'api_url'", "]", ",", "config", "[", "'username'", "]", ",", "config", "[", "'password'", "]", ")", "atexit", ".", "register", "(", "_disconnect_session", ",", "session", ")", "client", "=", "session", "[", "'client'", "]", "key", "=", "session", "[", "'key'", "]", "_sessions", "[", "server", "]", "=", "(", "client", ",", "key", ")", "return", "client", ",", "key" ]
Add the information for a individual
def add_individual ( self , genotype ) : logger . debug ( "Adding genotype {0} to variant {1}" . format ( genotype , self [ 'variant_id' ] ) ) self [ 'individuals' ] . append ( genotype )
5,161
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L122-L132
[ "def", "refresh_table_metadata", "(", "self", ",", "keyspace", ",", "table", ",", "max_schema_agreement_wait", "=", "None", ")", ":", "if", "not", "self", ".", "control_connection", ".", "refresh_schema", "(", "target_type", "=", "SchemaTargetType", ".", "TABLE", ",", "keyspace", "=", "keyspace", ",", "table", "=", "table", ",", "schema_agreement_wait", "=", "max_schema_agreement_wait", ",", "force", "=", "True", ")", ":", "raise", "DriverException", "(", "\"Table metadata was not refreshed. See log for details.\"", ")" ]
Add the information transcript
def add_transcript ( self , transcript ) : logger . debug ( "Adding transcript {0} to variant {1}" . format ( transcript , self [ 'variant_id' ] ) ) self [ 'transcripts' ] . append ( transcript )
5,162
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L134-L144
[ "def", "match_color_index", "(", "self", ",", "color", ")", ":", "from", "jcvi", ".", "utils", ".", "webcolors", "import", "color_diff", "if", "isinstance", "(", "color", ",", "int", ")", ":", "return", "color", "if", "color", ":", "if", "isinstance", "(", "color", ",", "six", ".", "string_types", ")", ":", "rgb", "=", "map", "(", "int", ",", "color", ".", "split", "(", "','", ")", ")", "else", ":", "rgb", "=", "color", ".", "Get", "(", ")", "logging", ".", "disable", "(", "logging", ".", "DEBUG", ")", "distances", "=", "[", "color_diff", "(", "rgb", ",", "x", ")", "for", "x", "in", "self", ".", "xlwt_colors", "]", "logging", ".", "disable", "(", "logging", ".", "NOTSET", ")", "result", "=", "distances", ".", "index", "(", "min", "(", "distances", ")", ")", "self", ".", "unused_colors", ".", "discard", "(", "self", ".", "xlwt_colors", "[", "result", "]", ")", "return", "result" ]
Add the information of a gene
def add_gene ( self , gene ) : logger . debug ( "Adding gene {0} to variant {1}" . format ( gene , self [ 'variant_id' ] ) ) self [ 'genes' ] . append ( gene )
5,163
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L146-L157
[ "def", "start_transmit", "(", "self", ",", "blocking", "=", "False", ",", "start_packet_groups", "=", "True", ",", "*", "ports", ")", ":", "port_list", "=", "self", ".", "set_ports_list", "(", "*", "ports", ")", "if", "start_packet_groups", ":", "port_list_for_packet_groups", "=", "self", ".", "ports", ".", "values", "(", ")", "port_list_for_packet_groups", "=", "self", ".", "set_ports_list", "(", "*", "port_list_for_packet_groups", ")", "self", ".", "api", ".", "call_rc", "(", "'ixClearTimeStamp {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartPacketGroups {}'", ".", "format", "(", "port_list_for_packet_groups", ")", ")", "self", ".", "api", ".", "call_rc", "(", "'ixStartTransmit {}'", ".", "format", "(", "port_list", ")", ")", "time", ".", "sleep", "(", "0.2", ")", "if", "blocking", ":", "self", ".", "wait_transmit", "(", "*", "ports", ")" ]
Add the information of a compound variant
def add_compound ( self , compound ) : logger . debug ( "Adding compound {0} to variant {1}" . format ( compound , self [ 'variant_id' ] ) ) self [ 'compounds' ] . append ( compound )
5,164
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L159-L170
[ "def", "extract_new", "(", "cls", ")", "->", "DevicesTypeUnbound", ":", "devices", "=", "cls", ".", "get_handlerclass", "(", ")", "(", "*", "_selection", "[", "cls", "]", ")", "_selection", "[", "cls", "]", ".", "clear", "(", ")", "return", "devices" ]
Set the variant id for this variant
def _set_variant_id ( self , variant_id = None ) : if not variant_id : variant_id = '_' . join ( [ self . CHROM , str ( self . POS ) , self . REF , self . ALT ] ) logger . debug ( "Updating variant id to {0}" . format ( variant_id ) ) self [ 'variant_id' ] = variant_id
5,165
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/models/variant.py#L181-L194
[ "def", "_get_session", "(", "server", ")", ":", "if", "server", "in", "_sessions", ":", "return", "_sessions", "[", "server", "]", "config", "=", "_get_spacewalk_configuration", "(", "server", ")", "if", "not", "config", ":", "raise", "Exception", "(", "'No config for \\'{0}\\' found on master'", ".", "format", "(", "server", ")", ")", "session", "=", "_get_client_and_key", "(", "config", "[", "'api_url'", "]", ",", "config", "[", "'username'", "]", ",", "config", "[", "'password'", "]", ")", "atexit", ".", "register", "(", "_disconnect_session", ",", "session", ")", "client", "=", "session", "[", "'client'", "]", "key", "=", "session", "[", "'key'", "]", "_sessions", "[", "server", "]", "=", "(", "client", ",", "key", ")", "return", "client", ",", "key" ]
Moves element from lst available at lst_idx .
def move_to_result ( self , lst_idx ) : self . in_result_idx . add ( lst_idx ) if lst_idx in self . not_in_result_root_match_idx : self . not_in_result_root_match_idx . remove ( lst_idx )
5,166
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/stats.py#L72-L77
[ "def", "create_decompress", "(", "fmt", ")", ":", "OPENJPEG", ".", "opj_create_decompress", ".", "argtypes", "=", "[", "ctypes", ".", "c_int", "]", "restype", "=", "ctypes", ".", "POINTER", "(", "DecompressionInfoType", ")", "OPENJPEG", ".", "opj_create_decompress", ".", "restype", "=", "restype", "dinfo", "=", "OPENJPEG", ".", "opj_create_decompress", "(", "fmt", ")", "return", "dinfo" ]
Adds a match for the elements avaialble at lst_idx and root_idx .
def add_root_match ( self , lst_idx , root_idx ) : self . root_matches [ lst_idx ] = root_idx if lst_idx in self . in_result_idx : return self . not_in_result_root_match_idx . add ( lst_idx )
5,167
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/stats.py#L79-L85
[ "def", "loadInternalSheet", "(", "klass", ",", "p", ",", "*", "*", "kwargs", ")", ":", "vs", "=", "klass", "(", "p", ".", "name", ",", "source", "=", "p", ",", "*", "*", "kwargs", ")", "options", ".", "_set", "(", "'encoding'", ",", "'utf8'", ",", "vs", ")", "if", "p", ".", "exists", "(", ")", ":", "vd", ".", "sheets", ".", "insert", "(", "0", ",", "vs", ")", "vs", ".", "reload", ".", "__wrapped__", "(", "vs", ")", "vd", ".", "sheets", ".", "pop", "(", "0", ")", "return", "vs" ]
Add all transcripts for a variant
def _add_transcripts ( self , variant_obj , gemini_variant ) : query = "SELECT * from variant_impacts WHERE variant_id = {0}" . format ( gemini_variant [ 'variant_id' ] ) gq = GeminiQuery ( self . db ) gq . run ( query ) for gemini_transcript in gq : transcript = Transcript ( hgnc_symbol = gemini_transcript [ 'gene' ] , transcript_id = gemini_transcript [ 'transcript' ] , consequence = gemini_transcript [ 'impact_so' ] , biotype = gemini_transcript [ 'biotype' ] , polyphen = gemini_transcript [ 'polyphen_pred' ] , sift = gemini_transcript [ 'sift_pred' ] , HGVSc = gemini_transcript [ 'codon_change' ] , HGVSp = ', ' . join ( [ gemini_transcript [ 'aa_change' ] or '' , gemini_transcript [ 'aa_length' ] or '' ] ) ) variant_obj . add_transcript ( transcript )
5,168
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/gemini/mixins/variant_extras/transcripts.py#L8-L39
[ "async", "def", "wind_type_classe", "(", "self", ")", ":", "data", "=", "await", "self", ".", "retrieve", "(", "url", "=", "API_WIND_TYPE", ")", "self", ".", "wind_type", "=", "dict", "(", ")", "for", "_type", "in", "data", "[", "'data'", "]", ":", "self", ".", "wind_type", "[", "int", "(", "_type", "[", "'classWindSpeed'", "]", ")", "]", "=", "_type", "[", "'descClassWindSpeedDailyPT'", "]", "return", "self", ".", "wind_type" ]
MySQL row - based binlog events pub .
def mysql_pub ( mysql_dsn , tables = None , blocking = False , * * kwargs ) : # parse mysql settings parsed = urlparse ( mysql_dsn ) mysql_settings = { "host" : parsed . hostname , "port" : parsed . port or 3306 , "user" : parsed . username , "passwd" : parsed . password } # connect to binlog stream stream = pymysqlreplication . BinLogStreamReader ( mysql_settings , server_id = random . randint ( 1000000000 , 4294967295 ) , blocking = blocking , only_events = [ DeleteRowsEvent , UpdateRowsEvent , WriteRowsEvent ] , * * kwargs ) def _pk ( values ) : if isinstance ( event . primary_key , str ) : return values [ event . primary_key ] return tuple ( values [ k ] for k in event . primary_key ) for event in stream : if not event . primary_key : continue if tables and event . table not in tables : continue try : rows = event . rows except ( UnicodeDecodeError , ValueError ) as e : logger . exception ( e ) continue timestamp = datetime . datetime . fromtimestamp ( event . timestamp ) if isinstance ( event , WriteRowsEvent ) : sg_name = "%s_write" % event . table sg = signal ( sg_name ) sg_raw = signal ( "%s_raw" % sg_name ) for row in rows : pk = _pk ( row [ "values" ] ) sg . send ( pk ) sg_raw . send ( row ) logger . debug ( "%s -> %s, %s" % ( sg_name , pk , timestamp ) ) elif isinstance ( event , UpdateRowsEvent ) : sg_name = "%s_update" % event . table sg = signal ( sg_name ) sg_raw = signal ( "%s_raw" % sg_name ) for row in rows : pk = _pk ( row [ "after_values" ] ) sg . send ( pk ) sg_raw . send ( row ) logger . debug ( "%s -> %s, %s" % ( sg_name , pk , timestamp ) ) elif isinstance ( event , DeleteRowsEvent ) : sg_name = "%s_delete" % event . table sg = signal ( sg_name ) sg_raw = signal ( "%s_raw" % sg_name ) for row in rows : pk = _pk ( row [ "values" ] ) sg . send ( pk ) sg_raw . send ( row ) logger . debug ( "%s -> %s, %s" % ( sg_name , pk , timestamp ) ) signal ( "mysql_binlog_pos" ) . send ( "%s:%s" % ( stream . log_file , stream . log_pos ) )
5,169
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/pub/mysql.py#L50-L180
[ "def", "set_cache_expiry", "(", "response", ")", ":", "if", "response", ".", "cache_control", ".", "max_age", "is", "None", "and", "'CACHE_DEFAULT_TIMEOUT'", "in", "config", ".", "cache", ":", "response", ".", "cache_control", ".", "max_age", "=", "config", ".", "cache", "[", "'CACHE_DEFAULT_TIMEOUT'", "]", "return", "response" ]
Loads mol file in rdkit without the hydrogens - they do not have to appear in the final figure . Once loaded the molecule is converted to SMILES format which RDKit appears to draw best - since we do not care about the actual coordinates of the original molecule it is sufficient to have just 2D information . Some molecules can be problematic to import and steps such as stopping sanitize function can be taken . This is done automatically if problems are observed . However better solutions can also be implemented and need more research . The molecule is then drawn from SMILES in 2D representation without hydrogens . The drawing is saved as an SVG file .
def load_molecule_in_rdkit_smiles ( self , molSize , kekulize = True , bonds = [ ] , bond_color = None , atom_color = { } , size = { } ) : mol_in_rdkit = self . topology_data . mol #need to reload without hydrogens try : mol_in_rdkit = Chem . RemoveHs ( mol_in_rdkit ) self . topology_data . smiles = Chem . MolFromSmiles ( Chem . MolToSmiles ( mol_in_rdkit ) ) except ValueError : mol_in_rdkit = Chem . RemoveHs ( mol_in_rdkit , sanitize = False ) self . topology_data . smiles = Chem . MolFromSmiles ( Chem . MolToSmiles ( mol_in_rdkit ) , sanitize = False ) self . atom_identities = { } i = 0 for atom in self . topology_data . smiles . GetAtoms ( ) : self . atom_identities [ mol_in_rdkit . GetProp ( '_smilesAtomOutputOrder' ) [ 1 : ] . rsplit ( "," ) [ i ] ] = atom . GetIdx ( ) i += 1 mc = Chem . Mol ( self . topology_data . smiles . ToBinary ( ) ) if kekulize : try : Chem . Kekulize ( mc ) except : mc = Chem . Mol ( self . topology_data . smiles . ToBinary ( ) ) if not mc . GetNumConformers ( ) : rdDepictor . Compute2DCoords ( mc ) atoms = [ ] colors = { } for i in range ( mol_in_rdkit . GetNumAtoms ( ) ) : atoms . append ( i ) if len ( atom_color ) == 0 : colors [ i ] = ( 1 , 1 , 1 ) else : colors = atom_color drawer = rdMolDraw2D . MolDraw2DSVG ( int ( molSize [ 0 ] ) , int ( molSize [ 1 ] ) ) drawer . DrawMolecule ( mc , highlightAtoms = atoms , highlightBonds = bonds , highlightAtomColors = colors , highlightAtomRadii = size , highlightBondColors = bond_color ) drawer . FinishDrawing ( ) self . svg = drawer . GetDrawingText ( ) . replace ( 'svg:' , '' ) filesvg = open ( "molecule.svg" , "w+" ) filesvg . write ( self . svg )
5,170
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/molecule.py#L48-L94
[ "def", "put", "(", "self", ",", "path", ",", "value", ",", "timeout", "=", "None", ",", "event_timeout", "=", "None", ")", ":", "future", "=", "self", ".", "put_async", "(", "path", ",", "value", ")", "self", ".", "wait_all_futures", "(", "future", ",", "timeout", "=", "timeout", ",", "event_timeout", "=", "event_timeout", ")", "return", "future", ".", "result", "(", ")" ]
Calculate overlap in 2D space
def calc_2d_forces ( self , x1 , y1 , x2 , y2 , width ) : #calculate a if x1 > x2 : a = x1 - x2 else : a = x2 - x1 a_sq = a * a #calculate b if y1 > y2 : b = y1 - y2 else : b = y2 - y1 b_sq = b * b #calculate c from math import sqrt c_sq = a_sq + b_sq c = sqrt ( c_sq ) if c > width : return 0 , 0 else : overlap = width - c return - overlap / 2 , overlap / 2
5,171
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/molecule.py#L162-L189
[ "def", "pem_managed", "(", "name", ",", "text", ",", "backup", "=", "False", ",", "*", "*", "kwargs", ")", ":", "file_args", ",", "kwargs", "=", "_get_file_args", "(", "name", ",", "*", "*", "kwargs", ")", "file_args", "[", "'contents'", "]", "=", "salt", ".", "utils", ".", "stringutils", ".", "to_str", "(", "__salt__", "[", "'x509.get_pem_entry'", "]", "(", "text", "=", "text", ")", ")", "return", "__states__", "[", "'file.managed'", "]", "(", "*", "*", "file_args", ")" ]
Calculates forces between two diagrams and pushes them apart by tenth of width
def do_step ( self , values , xy_values , coeff , width ) : forces = { k : [ ] for k , i in enumerate ( xy_values ) } for ( index1 , value1 ) , ( index2 , value2 ) in combinations ( enumerate ( xy_values ) , 2 ) : f = self . calc_2d_forces ( value1 [ 0 ] , value1 [ 1 ] , value2 [ 0 ] , value2 [ 1 ] , width ) if coeff [ index1 ] < coeff [ index2 ] : if self . b_lenght - coeff [ index2 ] < self . b_lenght / 10 : #a quick and dirty solution, but works forces [ index1 ] . append ( f [ 1 ] ) # push to left (smaller projection value) forces [ index2 ] . append ( f [ 0 ] ) else : #all is normal forces [ index1 ] . append ( f [ 0 ] ) # push to left (smaller projection value) forces [ index2 ] . append ( f [ 1 ] ) else : if self . b_lenght - coeff [ index1 ] < self . b_lenght / 10 : #a quick and dirty solution, but works forces [ index1 ] . append ( f [ 0 ] ) # push to left (smaller projection value) forces [ index2 ] . append ( f [ 1 ] ) else : #if all is normal forces [ index1 ] . append ( f [ 1 ] ) # push to left (smaller projection value) forces [ index2 ] . append ( f [ 0 ] ) forces = { k : sum ( v ) for k , v in forces . items ( ) } energy = sum ( [ abs ( x ) for x in forces . values ( ) ] ) return [ ( forces [ k ] / 10 + v ) for k , v in enumerate ( values ) ] , energy
5,172
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/molecule.py#L192-L217
[ "def", "construct_error_message", "(", "driver_id", ",", "error_type", ",", "message", ",", "timestamp", ")", ":", "builder", "=", "flatbuffers", ".", "Builder", "(", "0", ")", "driver_offset", "=", "builder", ".", "CreateString", "(", "driver_id", ".", "binary", "(", ")", ")", "error_type_offset", "=", "builder", ".", "CreateString", "(", "error_type", ")", "message_offset", "=", "builder", ".", "CreateString", "(", "message", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataStart", "(", "builder", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddDriverId", "(", "builder", ",", "driver_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddType", "(", "builder", ",", "error_type_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddErrorMessage", "(", "builder", ",", "message_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddTimestamp", "(", "builder", ",", "timestamp", ")", "error_data_offset", "=", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataEnd", "(", "builder", ")", "builder", ".", "Finish", "(", "error_data_offset", ")", "return", "bytes", "(", "builder", ".", "Output", "(", ")", ")" ]
Fetch variants for a case .
def variants ( self , case_id , skip = 0 , count = 1000 , filters = None ) : filters = filters or { } logger . debug ( "Fetching case with case_id: {0}" . format ( case_id ) ) case_obj = self . case ( case_id ) plugin , case_id = self . select_plugin ( case_obj ) self . filters = plugin . filters gene_lists = ( self . gene_list ( list_id ) for list_id in filters . get ( 'gene_lists' , [ ] ) ) nested_geneids = ( gene_list . gene_ids for gene_list in gene_lists ) gene_ids = set ( itertools . chain . from_iterable ( nested_geneids ) ) if filters . get ( 'gene_ids' ) : filters [ 'gene_ids' ] . extend ( gene_ids ) else : filters [ 'gene_ids' ] = gene_ids variants = plugin . variants ( case_id , skip , count , filters ) return variants
5,173
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/sql/mixins/variant.py#L11-L29
[ "def", "_CreateDynamicDisplayAdSettings", "(", "media_service", ",", "opener", ")", ":", "image", "=", "_CreateImage", "(", "media_service", ",", "opener", ",", "'https://goo.gl/dEvQeF'", ")", "logo", "=", "{", "'type'", ":", "'IMAGE'", ",", "'mediaId'", ":", "image", "[", "'mediaId'", "]", ",", "'xsi_type'", ":", "'Image'", "}", "dynamic_settings", "=", "{", "'landscapeLogoImage'", ":", "logo", ",", "'pricePrefix'", ":", "'as low as'", ",", "'promoText'", ":", "'Free shipping!'", ",", "'xsi_type'", ":", "'DynamicSettings'", ",", "}", "return", "dynamic_settings" ]
Fetch a single variant from variant source .
def variant ( self , case_id , variant_id ) : case_obj = self . case ( case_id ) plugin , case_id = self . select_plugin ( case_obj ) variant = plugin . variant ( case_id , variant_id ) return variant
5,174
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/sql/mixins/variant.py#L31-L36
[ "def", "_parse_path", "(", "self", ",", "path", ")", ":", "handle", ",", "path", "=", "self", ".", "_split_path", "(", "path", ")", "if", "self", ".", "_machine", "is", "not", "None", ":", "handle", "=", "self", ".", "_connect_hive", "(", "handle", ")", "return", "handle", ",", "path" ]
Redis EventSourcing sub .
def redis_es_sub ( session , tables , redis_dsn , strict = False , namespace = None , ttl = 3600 * 24 * 3 , socket_timeout = 1 ) : logger = logging . getLogger ( "meepo.sub.redis_es_sub" ) if not isinstance ( tables , ( list , set ) ) : raise ValueError ( "tables should be list or set" ) # install event store hook for tables event_store = RedisEventStore ( redis_dsn , namespace = namespace , ttl = ttl , socket_timeout = socket_timeout ) def _es_event_sub ( pk , event ) : if event_store . add ( event , str ( pk ) ) : logger . info ( "%s: %s -> %s" % ( event , pk , datetime . datetime . now ( ) ) ) else : logger . error ( "event sourcing failed: %s" % pk ) events = ( "%s_%s" % ( tb , action ) for tb , action in itertools . product ( * [ tables , [ "write" , "update" , "delete" ] ] ) ) for event in events : sub_func = functools . partial ( _es_event_sub , event = event ) signal ( event ) . connect ( sub_func , weak = False ) # install prepare-commit hook prepare_commit = RedisPrepareCommit ( redis_dsn , strict = strict , namespace = namespace , socket_timeout = socket_timeout ) signal ( "session_prepare" ) . connect ( prepare_commit . prepare , sender = session , weak = False ) signal ( "session_commit" ) . connect ( prepare_commit . commit , sender = session , weak = False ) signal ( "session_rollback" ) . connect ( prepare_commit . rollback , sender = session , weak = False ) return event_store , prepare_commit
5,175
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/sub.py#L16-L71
[ "def", "consumer_commit_for_times", "(", "consumer", ",", "partition_to_offset", ",", "atomic", "=", "False", ")", ":", "no_offsets", "=", "set", "(", ")", "for", "tp", ",", "offset", "in", "six", ".", "iteritems", "(", "partition_to_offset", ")", ":", "if", "offset", "is", "None", ":", "logging", ".", "error", "(", "\"No offsets found for topic-partition {tp}. Either timestamps not supported\"", "\" for the topic {tp}, or no offsets found after timestamp specified, or there is no\"", "\" data in the topic-partition.\"", ".", "format", "(", "tp", "=", "tp", ")", ",", ")", "no_offsets", ".", "add", "(", "tp", ")", "if", "atomic", "and", "len", "(", "no_offsets", ")", ">", "0", ":", "logging", ".", "error", "(", "\"Commit aborted; offsets were not found for timestamps in\"", "\" topics {}\"", ".", "format", "(", "\",\"", ".", "join", "(", "[", "str", "(", "tp", ")", "for", "tp", "in", "no_offsets", "]", ")", ")", ",", ")", "return", "offsets_metadata", "=", "{", "tp", ":", "OffsetAndMetadata", "(", "partition_to_offset", "[", "tp", "]", ".", "offset", ",", "metadata", "=", "None", ")", "for", "tp", "in", "six", ".", "iterkeys", "(", "partition_to_offset", ")", "if", "tp", "not", "in", "no_offsets", "}", "if", "len", "(", "offsets_metadata", ")", "!=", "0", ":", "consumer", ".", "commit", "(", "offsets_metadata", ")" ]
Setup an ArgumentParser .
def setup_parser ( ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( '-p' , '--port' , type = int , default = 5005 ) parser . add_argument ( '-i' , '--interval' , type = int , default = 480 ) parser . add_argument ( 'host' , type = str , help = 'hostname' ) return parser
5,176
https://github.com/jalmeroth/pymusiccast/blob/616379ae22d6b518c61042d58be6d18a46242168/musiccast.py#L13-L19
[ "def", "create_session", "(", "self", ",", "lock_type", "=", "library", ".", "LockType", ".", "shared", ",", "session", "=", "None", ")", ":", "if", "session", "is", "None", ":", "session", "=", "library", ".", "ISession", "(", ")", "# NOTE: The following hack handles the issue of unknown machine state.", "# This occurs most frequently when a machine is powered off and", "# in spite waiting for the completion event to end, the state of", "# machine still raises the following Error:", "# virtualbox.library.VBoxErrorVmError: 0x80bb0003 (Failed to \\", "# get a console object from the direct session (Unknown \\", "# Status 0x80BB0002))", "error", "=", "None", "for", "_", "in", "range", "(", "10", ")", ":", "try", ":", "self", ".", "lock_machine", "(", "session", ",", "lock_type", ")", "except", "Exception", "as", "exc", ":", "error", "=", "exc", "time", ".", "sleep", "(", "1", ")", "continue", "else", ":", "break", "else", ":", "if", "error", "is", "not", "None", ":", "raise", "Exception", "(", "\"Failed to create clone - %s\"", "%", "error", ")", "return", "session" ]
Connect to a McDevice
def main ( ) : args = setup_parser ( ) . parse_args ( ) host = getattr ( args , "host" ) port = getattr ( args , "port" ) ipv4 = socket . gethostbyname ( host ) interval = getattr ( args , "interval" ) receiver = McDevice ( ipv4 , udp_port = port , mc_interval = interval ) receiver . handle_status ( ) # wait for UDP messages while True : time . sleep ( 0.2 )
5,177
https://github.com/jalmeroth/pymusiccast/blob/616379ae22d6b518c61042d58be6d18a46242168/musiccast.py#L22-L35
[ "async", "def", "jsk_retain", "(", "self", ",", "ctx", ":", "commands", ".", "Context", ",", "*", ",", "toggle", ":", "bool", "=", "None", ")", ":", "if", "toggle", "is", "None", ":", "if", "self", ".", "retain", ":", "return", "await", "ctx", ".", "send", "(", "\"Variable retention is set to ON.\"", ")", "return", "await", "ctx", ".", "send", "(", "\"Variable retention is set to OFF.\"", ")", "if", "toggle", ":", "if", "self", ".", "retain", ":", "return", "await", "ctx", ".", "send", "(", "\"Variable retention is already set to ON.\"", ")", "self", ".", "retain", "=", "True", "self", ".", "_scope", "=", "Scope", "(", ")", "return", "await", "ctx", ".", "send", "(", "\"Variable retention is ON. Future REPL sessions will retain their scope.\"", ")", "if", "not", "self", ".", "retain", ":", "return", "await", "ctx", ".", "send", "(", "\"Variable retention is already set to OFF.\"", ")", "self", ".", "retain", "=", "False", "return", "await", "ctx", ".", "send", "(", "\"Variable retention is OFF. Future REPL sessions will dispose their scope when done.\"", ")" ]
Initialize a new Ghost API client reading the client ID and secret from the SQlite database .
def from_sqlite ( cls , database_path , base_url , version = 'auto' , client_id = 'ghost-admin' ) : import os import sqlite3 fd = os . open ( database_path , os . O_RDONLY ) connection = sqlite3 . connect ( '/dev/fd/%d' % fd ) os . close ( fd ) try : row = connection . execute ( 'SELECT secret FROM clients WHERE slug = ?' , ( client_id , ) ) . fetchone ( ) if row : return cls ( base_url , version = version , client_id = client_id , client_secret = row [ 0 ] ) else : raise GhostException ( 401 , [ { 'errorType' : 'InternalError' , 'message' : 'No client_secret found for client_id: %s' % client_id } ] ) finally : connection . close ( )
5,178
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L142-L180
[ "def", "uunion1d", "(", "arr1", ",", "arr2", ")", ":", "v", "=", "np", ".", "union1d", "(", "arr1", ",", "arr2", ")", "v", "=", "_validate_numpy_wrapper_units", "(", "v", ",", "[", "arr1", ",", "arr2", "]", ")", "return", "v" ]
Authenticate with the server .
def login ( self , username , password ) : data = self . _authenticate ( grant_type = 'password' , username = username , password = password , client_id = self . _client_id , client_secret = self . _client_secret ) self . _username = username self . _password = password return data
5,179
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L201-L221
[ "def", "_CollectHistory_", "(", "lookupType", ",", "fromVal", ",", "toVal", ",", "using", "=", "{", "}", ",", "pattern", "=", "''", ")", ":", "histObj", "=", "{", "}", "if", "fromVal", "!=", "toVal", ":", "histObj", "[", "lookupType", "]", "=", "{", "\"from\"", ":", "fromVal", ",", "\"to\"", ":", "toVal", "}", "if", "lookupType", "in", "[", "'deriveValue'", ",", "'deriveRegex'", ",", "'copyValue'", ",", "'normIncludes'", ",", "'deriveIncludes'", "]", "and", "using", "!=", "''", ":", "histObj", "[", "lookupType", "]", "[", "\"using\"", "]", "=", "using", "if", "lookupType", "in", "[", "'genericRegex'", ",", "'fieldSpecificRegex'", ",", "'normRegex'", ",", "'deriveRegex'", "]", "and", "pattern", "!=", "''", ":", "histObj", "[", "lookupType", "]", "[", "\"pattern\"", "]", "=", "pattern", "return", "histObj" ]
Re - authenticate using the refresh token if available . Otherwise log in using the username and password if it was used to authenticate initially .
def refresh_session ( self ) : if not self . _refresh_token : if self . _username and self . _password : return self . login ( self . _username , self . _password ) return return self . _authenticate ( grant_type = 'refresh_token' , refresh_token = self . _refresh_token , client_id = self . _client_id , client_secret = self . _client_secret )
5,180
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L223-L243
[ "def", "cli", "(", "file1", ",", "file2", ",", "comments", ")", "->", "int", ":", "sys", ".", "exit", "(", "compare_files", "(", "file1", ",", "file2", ",", "comments", ")", ")" ]
Revoke the access token currently in use .
def revoke_access_token ( self ) : if not self . _access_token : return self . execute_post ( 'authentication/revoke' , json = dict ( token_type_hint = 'access_token' , token = self . _access_token ) ) self . _access_token = None
5,181
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L260-L273
[ "def", "_find_files", "(", "self", ",", "entries", ",", "root", ",", "relative_path", ",", "file_name_regex", ")", ":", "self", ".", "log", "(", "[", "u\"Finding files within root: '%s'\"", ",", "root", "]", ")", "target", "=", "root", "if", "relative_path", "is", "not", "None", ":", "self", ".", "log", "(", "[", "u\"Joining relative path: '%s'\"", ",", "relative_path", "]", ")", "target", "=", "gf", ".", "norm_join", "(", "root", ",", "relative_path", ")", "self", ".", "log", "(", "[", "u\"Finding files within target: '%s'\"", ",", "target", "]", ")", "files", "=", "[", "]", "target_len", "=", "len", "(", "target", ")", "for", "entry", "in", "entries", ":", "if", "entry", ".", "startswith", "(", "target", ")", ":", "self", ".", "log", "(", "[", "u\"Examining entry: '%s'\"", ",", "entry", "]", ")", "entry_suffix", "=", "entry", "[", "target_len", "+", "1", ":", "]", "self", ".", "log", "(", "[", "u\"Examining entry suffix: '%s'\"", ",", "entry_suffix", "]", ")", "if", "re", ".", "search", "(", "file_name_regex", ",", "entry_suffix", ")", "is", "not", "None", ":", "self", ".", "log", "(", "[", "u\"Match: '%s'\"", ",", "entry", "]", ")", "files", ".", "append", "(", "entry", ")", "else", ":", "self", ".", "log", "(", "[", "u\"No match: '%s'\"", ",", "entry", "]", ")", "return", "sorted", "(", "files", ")" ]
Revoke the refresh token currently active .
def revoke_refresh_token ( self ) : if not self . _refresh_token : return self . execute_post ( 'authentication/revoke' , json = dict ( token_type_hint = 'refresh_token' , token = self . _refresh_token ) ) self . _refresh_token = None
5,182
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L275-L288
[ "def", "arcball_constrain_to_axis", "(", "point", ",", "axis", ")", ":", "v", "=", "np", ".", "array", "(", "point", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "True", ")", "a", "=", "np", ".", "array", "(", "axis", ",", "dtype", "=", "np", ".", "float64", ",", "copy", "=", "True", ")", "v", "-=", "a", "*", "np", ".", "dot", "(", "a", ",", "v", ")", "# on plane", "n", "=", "vector_norm", "(", "v", ")", "if", "n", ">", "_EPS", ":", "if", "v", "[", "2", "]", "<", "0.0", ":", "np", ".", "negative", "(", "v", ",", "v", ")", "v", "/=", "n", "return", "v", "if", "a", "[", "2", "]", "==", "1.0", ":", "return", "np", ".", "array", "(", "[", "1.0", ",", "0.0", ",", "0.0", "]", ")", "return", "unit_vector", "(", "[", "-", "a", "[", "1", "]", ",", "a", "[", "0", "]", ",", "0.0", "]", ")" ]
Log out revoking the access tokens and forgetting the login details if they were given .
def logout ( self ) : self . revoke_refresh_token ( ) self . revoke_access_token ( ) self . _username , self . _password = None , None
5,183
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L290-L299
[ "def", "add_manifold_attribute", "(", "cls", ",", "name", ")", ":", "if", "name", ".", "startswith", "(", "\"custom_\"", ")", ":", "try", ":", "return", "FieldDefinition", ".", "FIELDS", "[", "name", "]", "except", "KeyError", ":", "field", "=", "OnDemandField", "(", "fmt", ".", "to_unicode", ",", "name", ",", "\"custom attribute %r\"", "%", "name", ".", "split", "(", "'_'", ",", "1", ")", "[", "1", "]", ",", "matcher", "=", "matching", ".", "PatternFilter", ")", "setattr", "(", "cls", ",", "name", ",", "field", ")", "# add field to all proxy objects", "return", "field", "elif", "name", ".", "startswith", "(", "\"kind_\"", ")", "and", "name", "[", "5", ":", "]", ".", "isdigit", "(", ")", ":", "try", ":", "return", "FieldDefinition", ".", "FIELDS", "[", "name", "]", "except", "KeyError", ":", "limit", "=", "int", "(", "name", "[", "5", ":", "]", ".", "lstrip", "(", "'0'", ")", "or", "'0'", ",", "10", ")", "if", "limit", ">", "100", ":", "raise", "error", ".", "UserError", "(", "\"kind_N: N > 100 in %r\"", "%", "name", ")", "field", "=", "OnDemandField", "(", "set", ",", "name", ",", "\"kinds of files that make up more than %d%% of this item's size\"", "%", "limit", ",", "matcher", "=", "matching", ".", "TaggedAsFilter", ",", "formatter", "=", "_fmt_tags", ",", "engine_name", "=", "\"kind_%d\"", "%", "limit", ")", "setattr", "(", "cls", ",", "name", ",", "field", ")", "return", "field" ]
Upload an image and return its path on the server . Either file_obj or file_path or name and data has to be specified .
def upload ( self , file_obj = None , file_path = None , name = None , data = None ) : close = False if file_obj : file_name , content = os . path . basename ( file_obj . name ) , file_obj elif file_path : file_name , content = os . path . basename ( file_path ) , open ( file_path , 'rb' ) close = True elif name and data : file_name , content = name , data else : raise GhostException ( 400 , 'Either `file_obj` or `file_path` or ' '`name` and `data` needs to be specified' ) try : content_type , _ = mimetypes . guess_type ( file_name ) file_arg = ( file_name , content , content_type ) response = self . execute_post ( 'uploads/' , files = { 'uploadimage' : file_arg } ) return response finally : if close : content . close ( )
5,184
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L301-L343
[ "def", "convertToRateMatrix", "(", "self", ",", "Q", ")", ":", "rowSums", "=", "Q", ".", "sum", "(", "axis", "=", "1", ")", ".", "getA1", "(", ")", "idxRange", "=", "np", ".", "arange", "(", "Q", ".", "shape", "[", "0", "]", ")", "Qdiag", "=", "coo_matrix", "(", "(", "rowSums", ",", "(", "idxRange", ",", "idxRange", ")", ")", ",", "shape", "=", "Q", ".", "shape", ")", ".", "tocsr", "(", ")", "return", "Q", "-", "Qdiag" ]
Execute an HTTP GET request against the API endpoints . This method is meant for internal use .
def execute_get ( self , resource , * * kwargs ) : url = '%s/%s' % ( self . base_url , resource ) headers = kwargs . pop ( 'headers' , dict ( ) ) headers [ 'Accept' ] = 'application/json' headers [ 'Content-Type' ] = 'application/json' if kwargs : separator = '&' if '?' in url else '?' for key , value in kwargs . items ( ) : if hasattr ( value , '__iter__' ) and type ( value ) not in six . string_types : url = '%s%s%s=%s' % ( url , separator , key , ',' . join ( value ) ) else : url = '%s%s%s=%s' % ( url , separator , key , value ) separator = '&' if self . _access_token : headers [ 'Authorization' ] = 'Bearer %s' % self . _access_token else : separator = '&' if '?' in url else '?' url = '%s%sclient_id=%s&client_secret=%s' % ( url , separator , self . _client_id , self . _client_secret ) response = requests . get ( url , headers = headers ) if response . status_code // 100 != 2 : raise GhostException ( response . status_code , response . json ( ) . get ( 'errors' , [ ] ) ) return response . json ( )
5,185
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L346-L389
[ "def", "need_rejoin", "(", "self", ")", ":", "if", "not", "self", ".", "_subscription", ".", "partitions_auto_assigned", "(", ")", ":", "return", "False", "if", "self", ".", "_auto_assign_all_partitions", "(", ")", ":", "return", "False", "# we need to rejoin if we performed the assignment and metadata has changed", "if", "(", "self", ".", "_assignment_snapshot", "is", "not", "None", "and", "self", ".", "_assignment_snapshot", "!=", "self", ".", "_metadata_snapshot", ")", ":", "return", "True", "# we need to join if our subscription has changed since the last join", "if", "(", "self", ".", "_joined_subscription", "is", "not", "None", "and", "self", ".", "_joined_subscription", "!=", "self", ".", "_subscription", ".", "subscription", ")", ":", "return", "True", "return", "super", "(", "ConsumerCoordinator", ",", "self", ")", ".", "need_rejoin", "(", ")" ]
Execute an HTTP POST request against the API endpoints . This method is meant for internal use .
def execute_post ( self , resource , * * kwargs ) : return self . _request ( resource , requests . post , * * kwargs ) . json ( )
5,186
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L391-L401
[ "def", "_getStickersTemplatesDirectory", "(", "self", ",", "resource_name", ")", ":", "templates_dir", "=", "queryResourceDirectory", "(", "\"stickers\"", ",", "resource_name", ")", ".", "directory", "if", "self", ".", "filter_by_type", ":", "templates_dir", "=", "templates_dir", "+", "\"/\"", "+", "self", ".", "filter_by_type", "return", "templates_dir" ]
Execute an HTTP PUT request against the API endpoints . This method is meant for internal use .
def execute_put ( self , resource , * * kwargs ) : return self . _request ( resource , requests . put , * * kwargs ) . json ( )
5,187
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L403-L413
[ "def", "_find_cgroup_mounts", "(", ")", ":", "try", ":", "with", "open", "(", "'/proc/mounts'", ",", "'rt'", ")", "as", "mountsFile", ":", "for", "mount", "in", "mountsFile", ":", "mount", "=", "mount", ".", "split", "(", "' '", ")", "if", "mount", "[", "2", "]", "==", "'cgroup'", ":", "mountpoint", "=", "mount", "[", "1", "]", "options", "=", "mount", "[", "3", "]", "for", "option", "in", "options", ".", "split", "(", "','", ")", ":", "if", "option", "in", "ALL_KNOWN_SUBSYSTEMS", ":", "yield", "(", "option", ",", "mountpoint", ")", "except", "IOError", ":", "logging", ".", "exception", "(", "'Cannot read /proc/mounts'", ")" ]
Execute an HTTP DELETE request against the API endpoints . This method is meant for internal use . Does not return anything but raises an exception when failed .
def execute_delete ( self , resource , * * kwargs ) : self . _request ( resource , requests . delete , * * kwargs )
5,188
https://github.com/rycus86/ghost-client/blob/863d332801d2c1b8e7ad4573c7b16db78a7f8c8d/ghost_client/api.py#L415-L425
[ "def", "_get_all_lengths", "(", "self", ")", ":", "utt_lengths", "=", "{", "}", "for", "utt_idx", "in", "self", ".", "utt_ids", ":", "per_container", "=", "[", "c", ".", "_file", "[", "utt_idx", "]", ".", "shape", "[", "0", "]", "for", "c", "in", "self", ".", "containers", "]", "utt_lengths", "[", "utt_idx", "]", "=", "tuple", "(", "per_container", ")", "return", "utt_lengths" ]
Calculates the edit distance between two tokens .
def token_distance ( t1 , t2 , initial_match_penalization ) : if isinstance ( t1 , NameInitial ) or isinstance ( t2 , NameInitial ) : if t1 . token == t2 . token : return 0 if t1 == t2 : return initial_match_penalization return 1.0 return _normalized_edit_dist ( t1 . token , t2 . token )
5,189
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/contrib/inspirehep/author_util.py#L60-L68
[ "def", "_make_compile_argv", "(", "self", ",", "compile_request", ")", ":", "sources_minus_headers", "=", "list", "(", "self", ".", "_iter_sources_minus_headers", "(", "compile_request", ")", ")", "if", "len", "(", "sources_minus_headers", ")", "==", "0", ":", "raise", "self", ".", "_HeaderOnlyLibrary", "(", ")", "compiler", "=", "compile_request", ".", "compiler", "compiler_options", "=", "compile_request", ".", "compiler_options", "# We are going to execute in the target output, so get absolute paths for everything.", "buildroot", "=", "get_buildroot", "(", ")", "# TODO: add -v to every compiler and linker invocation!", "argv", "=", "(", "[", "compiler", ".", "exe_filename", "]", "+", "compiler", ".", "extra_args", "+", "# TODO: If we need to produce static libs, don't add -fPIC! (could use Variants -- see #5788).", "[", "'-c'", ",", "'-fPIC'", "]", "+", "compiler_options", "+", "[", "'-I{}'", ".", "format", "(", "os", ".", "path", ".", "join", "(", "buildroot", ",", "inc_dir", ")", ")", "for", "inc_dir", "in", "compile_request", ".", "include_dirs", "]", "+", "[", "os", ".", "path", ".", "join", "(", "buildroot", ",", "src", ")", "for", "src", "in", "sources_minus_headers", "]", ")", "self", ".", "context", ".", "log", ".", "info", "(", "\"selected compiler exe name: '{}'\"", ".", "format", "(", "compiler", ".", "exe_filename", ")", ")", "self", ".", "context", ".", "log", ".", "debug", "(", "\"compile argv: {}\"", ".", "format", "(", "argv", ")", ")", "return", "argv" ]
Simple tokenizer function to be used with the normalizers .
def simple_tokenize ( name ) : last_names , first_names = name . split ( ',' ) last_names = _RE_NAME_TOKEN_SEPARATOR . split ( last_names ) first_names = _RE_NAME_TOKEN_SEPARATOR . split ( first_names ) first_names = [ NameToken ( n ) if len ( n ) > 1 else NameInitial ( n ) for n in first_names if n ] last_names = [ NameToken ( n ) if len ( n ) > 1 else NameInitial ( n ) for n in last_names if n ] return { 'lastnames' : last_names , 'nonlastnames' : first_names }
5,190
https://github.com/inveniosoftware-contrib/json-merger/blob/adc6d372da018427e1db7b92424d3471e01a4118/json_merger/contrib/inspirehep/author_util.py#L71-L82
[ "def", "set_output_data_rate", "(", "self", ",", "output_data_rate", ")", ":", "# self.standby()", "register", "=", "self", ".", "MMA8452Q_Register", "[", "'CTRL_REG1'", "]", "self", ".", "board", ".", "i2c_read_request", "(", "self", ".", "address", ",", "register", ",", "1", ",", "Constants", ".", "I2C_READ", "|", "Constants", ".", "I2C_END_TX_MASK", ",", "self", ".", "data_val", ",", "Constants", ".", "CB_TYPE_DIRECT", ")", "control_reg", "=", "self", ".", "wait_for_read_result", "(", ")", "control_reg", "=", "control_reg", "[", "self", ".", "data_start", "]", "control_reg", "&=", "0xC7", "# Mask out data rate bits", "control_reg", "|=", "(", "output_data_rate", "<<", "3", ")", "self", ".", "board", ".", "i2c_write_request", "(", "self", ".", "address", ",", "[", "register", ",", "control_reg", "]", ")" ]
Calculates descriptors such as logP charges and MR and saves that in a dictionary .
def calculate_descriptors ( self , mol ) : #make dictionary self . ligand_atoms = { index : { "name" : x . name } for index , x in enumerate ( self . topology_data . universe . ligand_noH . atoms ) } #Calculate logP and MR contribs = self . calculate_logP ( mol ) #Calculate Gasteiger charges self . calculate_Gasteiger_charges ( mol ) #Calculate formal charges fcharges = self . calculate_formal_charge ( mol ) for atom in self . ligand_atoms . keys ( ) : self . ligand_atoms [ atom ] [ "logP" ] = contribs [ atom ] [ 0 ] self . ligand_atoms [ atom ] [ "MR" ] = contribs [ atom ] [ 1 ] self . ligand_atoms [ atom ] [ "Gasteiger_ch" ] = mol . GetAtomWithIdx ( atom ) . GetProp ( "_GasteigerCharge" ) self . ligand_atoms [ atom ] [ "Formal charges" ] = fcharges [ atom ] #Determine rotatable bonds self . rot_bonds = self . get_rotatable_bonds ( mol )
5,191
https://github.com/ldomic/lintools/blob/d825a4a7b35f3f857d3b81b46c9aee72b0ec697a/lintools/ligand_description.py#L23-L44
[ "def", "_expiration_generator", "(", "timeout_generator", ":", "Iterable", "[", "float", "]", ",", "now", ":", "Callable", "[", "[", "]", ",", "float", "]", "=", "time", ".", "time", ",", ")", "->", "Iterator", "[", "bool", "]", ":", "for", "timeout", "in", "timeout_generator", ":", "_next", "=", "now", "(", ")", "+", "timeout", "# next value is now + next generated timeout", "yield", "True", "while", "now", "(", ")", "<", "_next", ":", "# yield False while next is still in the future", "yield", "False" ]
Return all variants in the VCF .
def variants ( self , case_id , skip = 0 , count = 1000 , filters = None ) : filters = filters or { } case_obj = self . case ( case_id = case_id ) limit = count + skip genes = set ( ) if filters . get ( 'gene_ids' ) : genes = set ( [ gene_id . strip ( ) for gene_id in filters [ 'gene_ids' ] ] ) frequency = None if filters . get ( 'frequency' ) : frequency = float ( filters [ 'frequency' ] ) cadd = None if filters . get ( 'cadd' ) : cadd = float ( filters [ 'cadd' ] ) genetic_models = None if filters . get ( 'genetic_models' ) : genetic_models = set ( filters [ 'genetic_models' ] ) sv_len = None if filters . get ( 'sv_len' ) : sv_len = float ( filters [ 'sv_len' ] ) impact_severities = None if filters . get ( 'impact_severities' ) : impact_severities = set ( filters [ 'impact_severities' ] ) vcf_file_path = case_obj . variant_source self . head = get_header ( vcf_file_path ) self . vep_header = self . head . vep_columns self . snpeff_header = self . head . snpeff_columns variants = self . _get_filtered_variants ( vcf_file_path , filters ) result = [ ] skip_index = 0 for index , variant in enumerate ( variants ) : index += 1 if skip_index >= skip : variant_obj = self . _format_variants ( variant = variant , index = index , case_obj = case_obj , ) if genes and variant_obj : if not set ( variant_obj [ 'gene_symbols' ] ) . intersection ( genes ) : variant_obj = None if impact_severities and variant_obj : if not variant_obj [ 'impact_severity' ] in impact_severities : variant_obj = None if frequency and variant_obj : if variant_obj . max_freq > frequency : variant_obj = None if cadd and variant_obj : if variant_obj [ 'cadd_score' ] < cadd : variant_obj = None if genetic_models and variant_obj : models = set ( variant_obj . genetic_models ) if not models . intersection ( genetic_models ) : variant_obj = None if sv_len and variant_obj : if variant_obj . sv_len < sv_len : variant_obj = None if variant_obj : skip_index += 1 if skip_index <= limit : result . 
append ( variant_obj ) else : break else : skip_index += 1 return Results ( result , len ( result ) )
5,192
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/vcf/mixins/variant_mixin.py#L54-L164
[ "def", "server_close", "(", "self", ")", ":", "self", ".", "log", ".", "debug", "(", "\"Closing the socket server connection.\"", ")", "TCPServer", ".", "server_close", "(", "self", ")", "self", ".", "queue_manager", ".", "close", "(", ")", "self", ".", "topic_manager", ".", "close", "(", ")", "if", "hasattr", "(", "self", ".", "authenticator", ",", "'close'", ")", ":", "self", ".", "authenticator", ".", "close", "(", ")", "self", ".", "shutdown", "(", ")" ]
Check if variants follows the filters
def _get_filtered_variants ( self , vcf_file_path , filters = { } ) : genes = set ( ) consequences = set ( ) sv_types = set ( ) if filters . get ( 'gene_ids' ) : genes = set ( [ gene_id . strip ( ) for gene_id in filters [ 'gene_ids' ] ] ) if filters . get ( 'consequence' ) : consequences = set ( filters [ 'consequence' ] ) if filters . get ( 'sv_types' ) : sv_types = set ( filters [ 'sv_types' ] ) logger . info ( "Get variants from {0}" . format ( vcf_file_path ) ) if filters . get ( 'range' ) : range_str = "{0}:{1}-{2}" . format ( filters [ 'range' ] [ 'chromosome' ] , filters [ 'range' ] [ 'start' ] , filters [ 'range' ] [ 'end' ] ) vcf = VCF ( vcf_file_path ) handle = vcf ( range_str ) else : handle = VCF ( vcf_file_path ) for variant in handle : variant_line = str ( variant ) keep_variant = True if genes and keep_variant : keep_variant = False for gene in genes : if "{0}" . format ( gene ) in variant_line : keep_variant = True break if consequences and keep_variant : keep_variant = False for consequence in consequences : if consequence in variant_line : keep_variant = True break if sv_types and keep_variant : keep_variant = False for sv_type in sv_types : if sv_type in variant_line : keep_variant = True break if keep_variant : yield variant
5,193
https://github.com/robinandeer/puzzle/blob/9476f05b416d3a5135d25492cb31411fdf831c58/puzzle/plugins/vcf/mixins/variant_mixin.py#L166-L231
[ "def", "reserve_udp_port", "(", "self", ",", "port", ",", "project", ")", ":", "if", "port", "in", "self", ".", "_used_udp_ports", ":", "raise", "HTTPConflict", "(", "text", "=", "\"UDP port {} already in use on host {}\"", ".", "format", "(", "port", ",", "self", ".", "_console_host", ")", ")", "if", "port", "<", "self", ".", "_udp_port_range", "[", "0", "]", "or", "port", ">", "self", ".", "_udp_port_range", "[", "1", "]", ":", "raise", "HTTPConflict", "(", "text", "=", "\"UDP port {} is outside the range {}-{}\"", ".", "format", "(", "port", ",", "self", ".", "_udp_port_range", "[", "0", "]", ",", "self", ".", "_udp_port_range", "[", "1", "]", ")", ")", "self", ".", "_used_udp_ports", ".", "add", "(", "port", ")", "project", ".", "record_udp_port", "(", "port", ")", "log", ".", "debug", "(", "\"UDP port {} has been reserved\"", ".", "format", "(", "port", ")", ")" ]
Core FNV hash algorithm used in FNV0 and FNV1 .
def fnv ( data , hval_init , fnv_prime , fnv_size ) : assert isinstance ( data , bytes ) hval = hval_init for byte in data : hval = ( hval * fnv_prime ) % fnv_size hval = hval ^ _get_byte ( byte ) return hval
5,194
https://github.com/znerol/py-fnvhash/blob/ea6d6993e1082dee2ca3b9aba7a7eb2b7ab6a52a/fnvhash/__init__.py#L26-L36
[ "def", "on_exception", "(", "self", ",", "exception", ")", ":", "logger", ".", "error", "(", "'Exception from stream!'", ",", "exc_info", "=", "True", ")", "self", ".", "streaming_exception", "=", "exception" ]
Send session_prepare signal in session before_commit .
def session_prepare ( self , session , _ ) : if not hasattr ( session , 'meepo_unique_id' ) : self . _session_init ( session ) evt = collections . defaultdict ( set ) for action in ( "write" , "update" , "delete" ) : objs = getattr ( session , "pending_%s" % action ) # filter tables if possible if self . tables : objs = [ o for o in objs if o . __table__ . fullname in self . tables ] for obj in objs : evt_name = "%s_%s" % ( obj . __table__ . fullname , action ) evt [ evt_name ] . add ( obj ) self . logger . debug ( "%s - session_prepare: %s -> %s" % ( session . meepo_unique_id , evt_name , evt ) ) # only trigger signal when event exists if evt : signal ( "session_prepare" ) . send ( session , event = evt )
5,195
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/pub.py#L75-L100
[ "def", "random_stochastic_matrix", "(", "n", ",", "k", "=", "None", ",", "sparse", "=", "False", ",", "format", "=", "'csr'", ",", "random_state", "=", "None", ")", ":", "P", "=", "_random_stochastic_matrix", "(", "m", "=", "n", ",", "n", "=", "n", ",", "k", "=", "k", ",", "sparse", "=", "sparse", ",", "format", "=", "format", ",", "random_state", "=", "random_state", ")", "return", "P" ]
Send session_commit signal in sqlalchemy before_commit .
def session_commit ( self , session ) : # this may happen when there's nothing to commit if not hasattr ( session , 'meepo_unique_id' ) : self . logger . debug ( "skipped - session_commit" ) return # normal session pub self . logger . debug ( "%s - session_commit" % session . meepo_unique_id ) self . _session_pub ( session ) signal ( "session_commit" ) . send ( session ) self . _session_del ( session )
5,196
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/pub.py#L102-L117
[ "def", "read_gwf", "(", "filename", ",", "channels", ",", "start", "=", "None", ",", "end", "=", "None", ",", "scaled", "=", "None", ",", "ctype", "=", "None", ",", "series_class", "=", "TimeSeries", ")", ":", "# parse kwargs", "if", "not", "start", ":", "start", "=", "0", "if", "not", "end", ":", "end", "=", "0", "span", "=", "Segment", "(", "start", ",", "end", ")", "# open file", "stream", "=", "io_gwf", ".", "open_gwf", "(", "filename", ",", "'r'", ")", "nframes", "=", "stream", ".", "GetNumberOfFrames", "(", ")", "# find channels", "out", "=", "series_class", ".", "DictClass", "(", ")", "# loop over frames in GWF", "i", "=", "0", "while", "True", ":", "this", "=", "i", "i", "+=", "1", "# read frame", "try", ":", "frame", "=", "stream", ".", "ReadFrameNSubset", "(", "this", ",", "0", ")", "except", "IndexError", ":", "if", "this", ">=", "nframes", ":", "break", "raise", "# check whether we need this frame at all", "if", "not", "_need_frame", "(", "frame", ",", "start", ",", "end", ")", ":", "continue", "# get epoch for this frame", "epoch", "=", "LIGOTimeGPS", "(", "*", "frame", ".", "GetGTime", "(", ")", ")", "# and read all the channels", "for", "channel", "in", "channels", ":", "_scaled", "=", "_dynamic_scaled", "(", "scaled", ",", "channel", ")", "try", ":", "new", "=", "_read_channel", "(", "stream", ",", "this", ",", "str", "(", "channel", ")", ",", "ctype", ".", "get", "(", "channel", ",", "None", ")", ",", "epoch", ",", "start", ",", "end", ",", "scaled", "=", "_scaled", ",", "series_class", "=", "series_class", ")", "except", "_Skip", ":", "# don't need this frame for this channel", "continue", "try", ":", "out", "[", "channel", "]", ".", "append", "(", "new", ")", "except", "KeyError", ":", "out", "[", "channel", "]", "=", "numpy", ".", "require", "(", "new", ",", "requirements", "=", "[", "'O'", "]", ")", "# if we have all of the data we want, stop now", "if", "all", "(", "span", "in", "out", "[", "channel", "]", ".", "span", "for", "channel", 
"in", "out", ")", ":", "break", "# if any channels weren't read, something went wrong", "for", "channel", "in", "channels", ":", "if", "channel", "not", "in", "out", ":", "msg", "=", "\"Failed to read {0!r} from {1!r}\"", ".", "format", "(", "str", "(", "channel", ")", ",", "filename", ")", "if", "start", "or", "end", ":", "msg", "+=", "' for {0}'", ".", "format", "(", "span", ")", "raise", "ValueError", "(", "msg", ")", "return", "out" ]
Send session_rollback signal in sqlalchemy after_rollback .
def session_rollback ( self , session ) : # this may happen when there's nothing to rollback if not hasattr ( session , 'meepo_unique_id' ) : self . logger . debug ( "skipped - session_rollback" ) return # del session meepo id after rollback self . logger . debug ( "%s - after_rollback" % session . meepo_unique_id ) signal ( "session_rollback" ) . send ( session ) self . _session_del ( session )
5,197
https://github.com/eleme/meepo/blob/8212f0fe9b1d44be0c5de72d221a31c1d24bfe7a/meepo/apps/eventsourcing/pub.py#L119-L133
[ "def", "api_start", "(", "working_dir", ",", "host", ",", "port", ",", "thread", "=", "True", ")", ":", "api_srv", "=", "BlockstackdAPIServer", "(", "working_dir", ",", "host", ",", "port", ")", "log", ".", "info", "(", "\"Starting API server on port {}\"", ".", "format", "(", "port", ")", ")", "if", "thread", ":", "api_srv", ".", "start", "(", ")", "return", "api_srv" ]
Process fig and ax arguments .
def process_fig_and_ax_argument ( fig , ax , default_figsize = None ) : if default_figsize is not None : assert type ( default_figsize ) in [ tuple , list ] assert len ( default_figsize ) == 2 if ( fig is None ) and ( ax is None ) : fig , ax = plt . subplots ( figsize = default_figsize ) else : assert ( is_figure ( fig ) ) and ( is_axes ( ax ) ) return fig , ax
5,198
https://github.com/jam31118/vis/blob/965ebec102c539b323d5756fef04153ac71e50d9/vis/plot.py#L123-L139
[ "def", "_configure_manager", "(", "self", ")", ":", "self", ".", "_manager", "=", "CloudBlockStorageManager", "(", "self", ",", "resource_class", "=", "CloudBlockStorageVolume", ",", "response_key", "=", "\"volume\"", ",", "uri_base", "=", "\"volumes\"", ")", "self", ".", "_types_manager", "=", "BaseManager", "(", "self", ",", "resource_class", "=", "CloudBlockStorageVolumeType", ",", "response_key", "=", "\"volume_type\"", ",", "uri_base", "=", "\"types\"", ")", "self", ".", "_snapshot_manager", "=", "CloudBlockStorageSnapshotManager", "(", "self", ",", "resource_class", "=", "CloudBlockStorageSnapshot", ",", "response_key", "=", "\"snapshot\"", ",", "uri_base", "=", "\"snapshots\"", ")" ]
Return N - dimensional square s limits
def get_square_axes_limits ( coords , margin = 0.05 ) : #coords = [x,y,z] try : coords = [ np . array ( coord ) for coord in coords ] except : raise Exception ( "Failed to convert elements of 'coords' into numpy.array" ) lims = [ ( coord . min ( ) , coord . max ( ) ) for coord in coords ] mids = [ 0.5 * ( lim [ 0 ] + lim [ 1 ] ) for lim in lims ] widths = [ 0.5 * ( lim [ 1 ] - lim [ 0 ] ) for lim in lims ] max_width = max ( widths ) max_width += max_width * margin ax_lims = tuple ( ( mid - max_width , mid + max_width ) for mid in mids ) #xlim, ylim, zlim = ax_lims return ax_lims
5,199
https://github.com/jam31118/vis/blob/965ebec102c539b323d5756fef04153ac71e50d9/vis/plot.py#L209-L246
[ "def", "ParseSmsRow", "(", "self", ",", "parser_mediator", ",", "query", ",", "row", ",", "*", "*", "unused_kwargs", ")", ":", "query_hash", "=", "hash", "(", "query", ")", "sms_read", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'read'", ")", "sms_type", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'type'", ")", "event_data", "=", "AndroidSMSEventData", "(", ")", "event_data", ".", "address", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'address'", ")", "event_data", ".", "body", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'body'", ")", "event_data", ".", "offset", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'id'", ")", "event_data", ".", "query", "=", "query", "event_data", ".", "sms_read", "=", "self", ".", "SMS_READ", ".", "get", "(", "sms_read", ",", "'UNKNOWN'", ")", "event_data", ".", "sms_type", "=", "self", ".", "SMS_TYPE", ".", "get", "(", "sms_type", ",", "'UNKNOWN'", ")", "timestamp", "=", "self", ".", "_GetRowValue", "(", "query_hash", ",", "row", ",", "'date'", ")", "date_time", "=", "dfdatetime_java_time", ".", "JavaTime", "(", "timestamp", "=", "timestamp", ")", "event", "=", "time_events", ".", "DateTimeValuesEvent", "(", "date_time", ",", "definitions", ".", "TIME_DESCRIPTION_CREATION", ")", "parser_mediator", ".", "ProduceEventWithEventData", "(", "event", ",", "event_data", ")" ]