query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
Load permissions associated with actions.
def _load_permissions(self):
    """Load permissions associated with this permission object's actions.

    Builds a ``_P`` (needs/excludes pair) from the explicit needs, expanding
    any ``action``-method need into the user/role/system-role rules stored in
    the database, with a per-action cache to avoid repeated DB queries.
    The result is stored on ``self._permissions``; nothing is returned.
    """
    result = _P(needs=set(), excludes=set())
    if not self.allow_by_default:
        # Deny-by-default: every explicit need must be granted explicitly.
        result.needs.update(self.explicit_needs)
    for explicit_need in self.explicit_needs:
        if explicit_need.method == 'action':
            # Try the cache first; a miss triggers the DB lookups below.
            action = current_access.get_action_cache(
                self._cache_key(explicit_need))
            if action is None:
                action = _P(needs=set(), excludes=set())
                actionsusers = ActionUsers.query_by_action(
                    explicit_need).all()
                actionsroles = ActionRoles.query_by_action(
                    explicit_need).join(ActionRoles.role).all()
                actionssystem = ActionSystemRoles.query_by_action(
                    explicit_need).all()
                # Merge all three rule sources; exclude rows become excludes.
                for db_action in chain(
                        actionsusers, actionsroles, actionssystem):
                    if db_action.exclude:
                        action.excludes.add(db_action.need)
                    else:
                        action.needs.add(db_action.need)
                current_access.set_action_cache(
                    self._cache_key(explicit_need), action)
            # in-place update of results
            result.update(action)
        elif self.allow_by_default:
            # Non-action needs pass through untouched when allowing by default.
            result.needs.add(explicit_need)
    self._permissions = result
2,800
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/permissions.py#L122-L165
[ "def", "read_avro", "(", "file_path_or_buffer", ",", "schema", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "file_path_or_buffer", ",", "six", ".", "string_types", ")", ":", "with", "open", "(", "file_path_or_buffer", ",", "'rb'", ")", "as", "f", ":", "return", "__file_to_dataframe", "(", "f", ",", "schema", ",", "*", "*", "kwargs", ")", "else", ":", "return", "__file_to_dataframe", "(", "file_path_or_buffer", ",", "schema", ",", "*", "*", "kwargs", ")" ]
Decorate function to return LazyProxy .
def lazy_result(f):
    """Decorate a click callback so its result is computed lazily.

    The wrapped callback returns a ``LocalProxy`` that defers the real call
    to ``f(ctx, param, value)`` until the proxy is first used.
    """
    @wraps(f)
    def decorated(ctx, param, value):
        def _evaluate():
            return f(ctx, param, value)
        return LocalProxy(_evaluate)
    return decorated
2,801
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L30-L35
[ "def", "save_and_validate_logo", "(", "logo_stream", ",", "logo_filename", ",", "community_id", ")", ":", "cfg", "=", "current_app", ".", "config", "logos_bucket_id", "=", "cfg", "[", "'COMMUNITIES_BUCKET_UUID'", "]", "logo_max_size", "=", "cfg", "[", "'COMMUNITIES_LOGO_MAX_SIZE'", "]", "logos_bucket", "=", "Bucket", ".", "query", ".", "get", "(", "logos_bucket_id", ")", "ext", "=", "os", ".", "path", ".", "splitext", "(", "logo_filename", ")", "[", "1", "]", "ext", "=", "ext", "[", "1", ":", "]", "if", "ext", ".", "startswith", "(", "'.'", ")", "else", "ext", "logo_stream", ".", "seek", "(", "SEEK_SET", ",", "SEEK_END", ")", "# Seek from beginning to end", "logo_size", "=", "logo_stream", ".", "tell", "(", ")", "if", "logo_size", ">", "logo_max_size", ":", "return", "None", "if", "ext", "in", "cfg", "[", "'COMMUNITIES_LOGO_EXTENSIONS'", "]", ":", "key", "=", "\"{0}/logo.{1}\"", ".", "format", "(", "community_id", ",", "ext", ")", "logo_stream", ".", "seek", "(", "0", ")", "# Rewind the stream to the beginning", "ObjectVersion", ".", "create", "(", "logos_bucket", ",", "key", ",", "stream", "=", "logo_stream", ",", "size", "=", "logo_size", ")", "return", "ext", "else", ":", "return", "None" ]
Return an action if it exists.
def process_action(ctx, param, value):
    """Return a registered action by name.

    :param ctx: click context (unused).
    :param param: click parameter (unused).
    :param value: name of the action to look up.
    :raises click.BadParameter: if the action is not registered.
    """
    actions = current_app.extensions['invenio-access'].actions
    if value not in actions:
        # BUG FIX: the original passed ``value`` as the second positional
        # argument of BadParameter (the click context), so the '%s' in the
        # message was never interpolated. Format the message explicitly.
        raise click.BadParameter(
            'Action "{0}" is not registered.'.format(value))
    return actions[value]
2,802
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L39-L44
[ "def", "unwrap_synchro", "(", "fn", ")", ":", "@", "functools", ".", "wraps", "(", "fn", ")", "def", "_unwrap_synchro", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "_unwrap_obj", "(", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "Synchro", ")", ":", "return", "obj", ".", "delegate", "else", ":", "return", "obj", "args", "=", "[", "_unwrap_obj", "(", "arg", ")", "for", "arg", "in", "args", "]", "kwargs", "=", "dict", "(", "[", "(", "key", ",", "_unwrap_obj", "(", "value", ")", ")", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", "]", ")", "return", "fn", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "_unwrap_synchro" ]
Return a user if it exists.
def process_email(ctx, param, value):
    """Return a user identified by an email address.

    :param ctx: click context (unused).
    :param param: click parameter (unused).
    :param value: email address of the user to look up.
    :raises click.BadParameter: if no user with that email exists.
    """
    user = User.query.filter(User.email == value).first()
    if not user:
        # BUG FIX: the original passed ``value`` as BadParameter's second
        # positional argument (the click context), leaving '%s' unfilled.
        raise click.BadParameter(
            'User with email \'{0}\' not found.'.format(value))
    return user
2,803
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L48-L53
[ "def", "GetAttachmentIdFromMediaId", "(", "media_id", ")", ":", "altchars", "=", "'+-'", "if", "not", "six", ".", "PY2", ":", "altchars", "=", "altchars", ".", "encode", "(", "'utf-8'", ")", "# altchars for '+' and '/'. We keep '+' but replace '/' with '-'", "buffer", "=", "base64", ".", "b64decode", "(", "str", "(", "media_id", ")", ",", "altchars", ")", "resoure_id_length", "=", "20", "attachment_id", "=", "''", "if", "len", "(", "buffer", ")", ">", "resoure_id_length", ":", "# We are cutting off the storage index.", "attachment_id", "=", "base64", ".", "b64encode", "(", "buffer", "[", "0", ":", "resoure_id_length", "]", ",", "altchars", ")", "if", "not", "six", ".", "PY2", ":", "attachment_id", "=", "attachment_id", ".", "decode", "(", "'utf-8'", ")", "else", ":", "attachment_id", "=", "media_id", "return", "attachment_id" ]
Return a role if it exists .
def process_role(ctx, param, value):
    """Return a role identified by its name.

    :param ctx: click context (unused).
    :param param: click parameter (unused).
    :param value: name of the role to look up.
    :raises click.BadParameter: if no role with that name exists.
    """
    role = Role.query.filter(Role.name == value).first()
    if not role:
        # BUG FIX: the original passed ``value`` as BadParameter's second
        # positional argument (the click context), leaving '%s' unfilled.
        raise click.BadParameter(
            'Role with name \'{0}\' not found.'.format(value))
    return role
2,804
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L57-L62
[ "def", "edit_ticket", "(", "self", ",", "ticket_id", ",", "*", "*", "kwargs", ")", ":", "post_data", "=", "''", "for", "key", ",", "value", "in", "iteritems", "(", "kwargs", ")", ":", "if", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ")", ")", ":", "value", "=", "\", \"", ".", "join", "(", "value", ")", "if", "key", "[", ":", "3", "]", "!=", "'CF_'", ":", "post_data", "+=", "\"{}: {}\\n\"", ".", "format", "(", "key", ",", "value", ")", "else", ":", "post_data", "+=", "\"CF.{{{}}}: {}\\n\"", ".", "format", "(", "key", "[", "3", ":", "]", ",", "value", ")", "msg", "=", "self", ".", "__request", "(", "'ticket/{}/edit'", ".", "format", "(", "str", "(", "ticket_id", ")", ")", ",", "post_data", "=", "{", "'content'", ":", "post_data", "}", ")", "state", "=", "msg", ".", "split", "(", "'\\n'", ")", "[", "2", "]", "return", "self", ".", "RE_PATTERNS", "[", "'update_pattern'", "]", ".", "match", "(", "state", ")", "is", "not", "None" ]
Allow a user identified by an email address .
def allow_user(user):
    """Return a processor that stages an allow rule for *user*.

    The returned callable adds an ``ActionUsers`` allow row to the current
    database session; committing is left to the caller.
    """
    def processor(action, argument):
        rule = ActionUsers.allow(action, argument=argument, user_id=user.id)
        db.session.add(rule)
    return processor
2,805
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L108-L114
[ "def", "write", "(", "self", ",", "basename", "=", "None", ",", "write_separate_manifests", "=", "True", ")", ":", "self", ".", "check_files", "(", ")", "n", "=", "0", "for", "manifest", "in", "self", ".", "partition_dumps", "(", ")", ":", "dumpbase", "=", "\"%s%05d\"", "%", "(", "basename", ",", "n", ")", "dumpfile", "=", "\"%s.%s\"", "%", "(", "dumpbase", ",", "self", ".", "format", ")", "if", "(", "write_separate_manifests", ")", ":", "manifest", ".", "write", "(", "basename", "=", "dumpbase", "+", "'.xml'", ")", "if", "(", "self", ".", "format", "==", "'zip'", ")", ":", "self", ".", "write_zip", "(", "manifest", ".", "resources", ",", "dumpfile", ")", "elif", "(", "self", ".", "format", "==", "'warc'", ")", ":", "self", ".", "write_warc", "(", "manifest", ".", "resources", ",", "dumpfile", ")", "else", ":", "raise", "DumpError", "(", "\"Unknown dump format requested (%s)\"", "%", "(", "self", ".", "format", ")", ")", "n", "+=", "1", "self", ".", "logger", ".", "info", "(", "\"Wrote %d dump files\"", "%", "(", "n", ")", ")", "return", "(", "n", ")" ]
Allow a role identified by its name.
def allow_role(role):
    """Return a processor that stages an allow rule for *role*.

    The returned callable adds an ``ActionRoles`` allow row to the current
    database session; committing is left to the caller.
    """
    def processor(action, argument):
        rule = ActionRoles.allow(action, argument=argument, role_id=role.id)
        db.session.add(rule)
    return processor
2,806
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L119-L125
[ "def", "_download_video", "(", "self", ",", "video_url", ",", "video_name", ")", ":", "filename", "=", "'{0:0=2d}_{1}'", ".", "format", "(", "DriverWrappersPool", ".", "videos_number", ",", "video_name", ")", "filename", "=", "'{}.mp4'", ".", "format", "(", "get_valid_filename", "(", "filename", ")", ")", "filepath", "=", "os", ".", "path", ".", "join", "(", "DriverWrappersPool", ".", "videos_directory", ",", "filename", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "DriverWrappersPool", ".", "videos_directory", ")", ":", "os", ".", "makedirs", "(", "DriverWrappersPool", ".", "videos_directory", ")", "response", "=", "requests", ".", "get", "(", "video_url", ")", "open", "(", "filepath", ",", "'wb'", ")", ".", "write", "(", "response", ".", "content", ")", "self", ".", "logger", ".", "info", "(", "\"Video saved in '%s'\"", ",", "filepath", ")", "DriverWrappersPool", ".", "videos_number", "+=", "1" ]
Process allow action .
def process_allow_action(processors, action, argument):
    """Apply every allow processor to *action* and commit the session.

    :param processors: iterable of callables taking ``(action, argument)``.
    :param action: action to allow.
    :param argument: optional action argument.
    """
    for apply_rule in processors:
        apply_rule(action, argument)
    # Persist all staged rules in one transaction.
    db.session.commit()
2,807
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L130-L134
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
Deny a user identified by an email address .
def deny_user(user):
    """Return a processor that stages a deny rule for *user*.

    The returned callable adds an ``ActionUsers`` deny row to the current
    database session; committing is left to the caller.
    """
    def processor(action, argument):
        rule = ActionUsers.deny(action, argument=argument, user_id=user.id)
        db.session.add(rule)
    return processor
2,808
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L149-L155
[ "def", "write", "(", "self", ",", "basename", "=", "None", ",", "write_separate_manifests", "=", "True", ")", ":", "self", ".", "check_files", "(", ")", "n", "=", "0", "for", "manifest", "in", "self", ".", "partition_dumps", "(", ")", ":", "dumpbase", "=", "\"%s%05d\"", "%", "(", "basename", ",", "n", ")", "dumpfile", "=", "\"%s.%s\"", "%", "(", "dumpbase", ",", "self", ".", "format", ")", "if", "(", "write_separate_manifests", ")", ":", "manifest", ".", "write", "(", "basename", "=", "dumpbase", "+", "'.xml'", ")", "if", "(", "self", ".", "format", "==", "'zip'", ")", ":", "self", ".", "write_zip", "(", "manifest", ".", "resources", ",", "dumpfile", ")", "elif", "(", "self", ".", "format", "==", "'warc'", ")", ":", "self", ".", "write_warc", "(", "manifest", ".", "resources", ",", "dumpfile", ")", "else", ":", "raise", "DumpError", "(", "\"Unknown dump format requested (%s)\"", "%", "(", "self", ".", "format", ")", ")", "n", "+=", "1", "self", ".", "logger", ".", "info", "(", "\"Wrote %d dump files\"", "%", "(", "n", ")", ")", "return", "(", "n", ")" ]
Deny a role identified by its name.
def deny_role(role):
    """Return a processor that stages a deny rule for *role*.

    The returned callable adds an ``ActionRoles`` deny row to the current
    database session; committing is left to the caller.
    """
    def processor(action, argument):
        rule = ActionRoles.deny(action, argument=argument, role_id=role.id)
        db.session.add(rule)
    return processor
2,809
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L160-L166
[ "def", "_download_video", "(", "self", ",", "video_url", ",", "video_name", ")", ":", "filename", "=", "'{0:0=2d}_{1}'", ".", "format", "(", "DriverWrappersPool", ".", "videos_number", ",", "video_name", ")", "filename", "=", "'{}.mp4'", ".", "format", "(", "get_valid_filename", "(", "filename", ")", ")", "filepath", "=", "os", ".", "path", ".", "join", "(", "DriverWrappersPool", ".", "videos_directory", ",", "filename", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "DriverWrappersPool", ".", "videos_directory", ")", ":", "os", ".", "makedirs", "(", "DriverWrappersPool", ".", "videos_directory", ")", "response", "=", "requests", ".", "get", "(", "video_url", ")", "open", "(", "filepath", ",", "'wb'", ")", ".", "write", "(", "response", ".", "content", ")", "self", ".", "logger", ".", "info", "(", "\"Video saved in '%s'\"", ",", "filepath", ")", "DriverWrappersPool", ".", "videos_number", "+=", "1" ]
Process deny action .
def process_deny_action(processors, action, argument):
    """Apply every deny processor to *action* and commit the session.

    :param processors: iterable of callables taking ``(action, argument)``.
    :param action: action to deny.
    :param argument: optional action argument.
    """
    for apply_rule in processors:
        apply_rule(action, argument)
    # Persist all staged rules in one transaction.
    db.session.commit()
2,810
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L171-L175
[ "def", "volumes_delete", "(", "storage_pool", ",", "logger", ")", ":", "try", ":", "for", "vol_name", "in", "storage_pool", ".", "listVolumes", "(", ")", ":", "try", ":", "vol", "=", "storage_pool", ".", "storageVolLookupByName", "(", "vol_name", ")", "vol", ".", "delete", "(", "0", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volume %s.\"", ",", "vol_name", ")", "except", "libvirt", ".", "libvirtError", ":", "logger", ".", "exception", "(", "\"Unable to delete storage volumes.\"", ")" ]
Remove global action rule .
def remove_global():
    """Return a processor that deletes the global rule for an action.

    Global rules are ``ActionUsers`` rows whose ``user_id`` is NULL.
    """
    def processor(action, argument):
        query = ActionUsers.query_by_action(action, argument=argument)
        query.filter(ActionUsers.user_id.is_(None)).delete(
            synchronize_session=False)
    return processor
2,811
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L193-L199
[ "def", "detach_storage", "(", "self", ",", "server", ",", "address", ")", ":", "body", "=", "{", "'storage_device'", ":", "{", "'address'", ":", "address", "}", "}", "url", "=", "'/server/{0}/storage/detach'", ".", "format", "(", "server", ")", "res", "=", "self", ".", "post_request", "(", "url", ",", "body", ")", "return", "Storage", ".", "_create_storage_objs", "(", "res", "[", "'server'", "]", "[", "'storage_devices'", "]", ",", "cloud_manager", "=", "self", ")" ]
Remove an action for a user.
def remove_user(user):
    """Return a processor that deletes an action's rules for *user*."""
    def processor(action, argument):
        query = ActionUsers.query_by_action(action, argument=argument)
        query.filter(ActionUsers.user_id == user.id).delete(
            synchronize_session=False)
    return processor
2,812
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L204-L210
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_closed", ":", "return", "False", "log", ".", "info", "(", "\"{module}: '{name}' [{id}]: is closing\"", ".", "format", "(", "module", "=", "self", ".", "manager", ".", "module_name", ",", "name", "=", "self", ".", "name", ",", "id", "=", "self", ".", "id", ")", ")", "if", "self", ".", "_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_console", ",", "self", ".", "_project", ")", "self", ".", "_console", "=", "None", "if", "self", ".", "_wrap_console", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_internal_console_port", ",", "self", ".", "_project", ")", "self", ".", "_internal_console_port", "=", "None", "if", "self", ".", "_aux", ":", "self", ".", "_manager", ".", "port_manager", ".", "release_tcp_port", "(", "self", ".", "_aux", ",", "self", ".", "_project", ")", "self", ".", "_aux", "=", "None", "self", ".", "_closed", "=", "True", "return", "True" ]
Remove an action for a role.
def remove_role(role):
    """Return a processor that deletes an action's rules for *role*."""
    def processor(action, argument):
        query = ActionRoles.query_by_action(action, argument=argument)
        query.filter(ActionRoles.role_id == role.id).delete(
            synchronize_session=False)
    return processor
2,813
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L215-L221
[ "def", "_close", "(", "self", ")", ":", "self", ".", "_usb_handle", ".", "releaseInterface", "(", ")", "try", ":", "# If we're using PyUSB >= 1.0 we can re-attach the kernel driver here.", "self", ".", "_usb_handle", ".", "dev", ".", "attach_kernel_driver", "(", "0", ")", "except", ":", "pass", "self", ".", "_usb_int", "=", "None", "self", ".", "_usb_handle", "=", "None", "return", "True" ]
Process action removals .
def process_remove_action(processors, action, argument):
    """Apply every removal processor to *action* and commit the session.

    :param processors: iterable of callables taking ``(action, argument)``.
    :param action: action whose rules are being removed.
    :param argument: optional action argument.
    """
    for apply_rule in processors:
        apply_rule(action, argument)
    # Persist all deletions in one transaction.
    db.session.commit()
2,814
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L226-L230
[ "def", "read_secret_version", "(", "self", ",", "path", ",", "version", "=", "None", ",", "mount_point", "=", "DEFAULT_MOUNT_POINT", ")", ":", "params", "=", "{", "}", "if", "version", "is", "not", "None", ":", "params", "[", "'version'", "]", "=", "version", "api_path", "=", "'/v1/{mount_point}/data/{path}'", ".", "format", "(", "mount_point", "=", "mount_point", ",", "path", "=", "path", ")", "response", "=", "self", ".", "_adapter", ".", "get", "(", "url", "=", "api_path", ",", "params", "=", "params", ",", ")", "return", "response", ".", "json", "(", ")" ]
List all registered actions .
def list_actions():
    """List all registered actions.

    Prints one ``name:`` line per action; a trailing ``*`` marks actions
    that accept an argument.
    """
    for name, action in _current_actions.items():
        suffix = '*' if hasattr(action, 'argument') else ''
        click.echo('{0}:{1}'.format(name, suffix))
2,815
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L235-L240
[ "def", "_merge_pool_kwargs", "(", "self", ",", "override", ")", ":", "base_pool_kwargs", "=", "self", ".", "connection_pool_kw", ".", "copy", "(", ")", "if", "override", ":", "for", "key", ",", "value", "in", "override", ".", "items", "(", ")", ":", "if", "value", "is", "None", ":", "try", ":", "del", "base_pool_kwargs", "[", "key", "]", "except", "KeyError", ":", "pass", "else", ":", "base_pool_kwargs", "[", "key", "]", "=", "value", "return", "base_pool_kwargs" ]
Show all assigned actions .
def show_actions(email, role):
    """Show all assigned actions for the given users and/or roles.

    :param email: iterable of user email addresses to report on.
    :param role: iterable of role names to report on.

    Output format is ``kind:owner:action:argument:allow|deny``; deny rules
    are printed in red, allow rules in green.
    """
    def _echo(kind, owner, action):
        # Single formatting point for both user and role rows (the original
        # duplicated this block verbatim for each branch).
        click.secho(
            '{0}:{1}:{2}:{3}:{4}'.format(
                kind,
                owner,
                action.action,
                '' if action.argument is None else action.argument,
                'deny' if action.exclude else 'allow',
            ),
            fg='red' if action.exclude else 'green')

    if email:
        actions = ActionUsers.query.join(ActionUsers.user).filter(
            User.email.in_(email)).all()
        for action in actions:
            _echo('user', action.user.email, action)
    if role:
        actions = ActionRoles.query.filter(
            Role.name.in_(role)).join(ActionRoles.role).all()
        for action in actions:
            _echo('role', action.role.name, action)
2,816
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/cli.py#L247-L271
[ "def", "thaw_decrypt", "(", "vault_client", ",", "src_file", ",", "tmp_dir", ",", "opt", ")", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "opt", ".", "secrets", ")", ":", "LOG", ".", "info", "(", "\"Creating secret directory %s\"", ",", "opt", ".", "secrets", ")", "os", ".", "mkdir", "(", "opt", ".", "secrets", ")", "zip_file", "=", "\"%s/aomi.zip\"", "%", "tmp_dir", "if", "opt", ".", "gpg_pass_path", ":", "gpg_path_bits", "=", "opt", ".", "gpg_pass_path", ".", "split", "(", "'/'", ")", "gpg_path", "=", "'/'", ".", "join", "(", "gpg_path_bits", "[", "0", ":", "len", "(", "gpg_path_bits", ")", "-", "1", "]", ")", "gpg_field", "=", "gpg_path_bits", "[", "len", "(", "gpg_path_bits", ")", "-", "1", "]", "resp", "=", "vault_client", ".", "read", "(", "gpg_path", ")", "gpg_pass", "=", "None", "if", "resp", "and", "'data'", "in", "resp", "and", "gpg_field", "in", "resp", "[", "'data'", "]", ":", "gpg_pass", "=", "resp", "[", "'data'", "]", "[", "gpg_field", "]", "if", "not", "gpg_pass", ":", "raise", "aomi", ".", "exceptions", ".", "GPG", "(", "\"Unable to retrieve GPG password\"", ")", "LOG", ".", "debug", "(", "\"Retrieved GPG password from Vault\"", ")", "if", "not", "decrypt", "(", "src_file", ",", "zip_file", ",", "passphrase", "=", "gpg_pass", ")", ":", "raise", "aomi", ".", "exceptions", ".", "GPG", "(", "\"Unable to gpg\"", ")", "else", ":", "raise", "aomi", ".", "exceptions", ".", "VaultData", "(", "\"Unable to retrieve GPG password\"", ")", "else", ":", "if", "not", "decrypt", "(", "src_file", ",", "zip_file", ")", ":", "raise", "aomi", ".", "exceptions", ".", "GPG", "(", "\"Unable to gpg\"", ")", "return", "zip_file" ]
A wrapper for assess_mhc_genes .
def run_mhc_gene_assessment(job, rsem_files, rna_haplotype, univ_options,
                            reports_options):
    """A wrapper for assess_mhc_genes.

    Schedules ``assess_mhc_genes`` as a child Toil job on the RSEM gene
    results and returns its promised return value.
    """
    child = job.addChildJobFn(assess_mhc_genes,
                              rsem_files['rsem.genes.results'],
                              rna_haplotype,
                              univ_options,
                              reports_options)
    return child.rv()
2,817
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/addons/assess_mhc_pathway.py#L27-L39
[ "def", "save", "(", "self", ")", ":", "data", "=", "self", ".", "get_selected_item", "(", ")", "if", "'saved'", "not", "in", "data", ":", "self", ".", "term", ".", "flash", "(", ")", "elif", "not", "data", "[", "'saved'", "]", ":", "with", "self", ".", "term", ".", "loader", "(", "'Saving'", ")", ":", "data", "[", "'object'", "]", ".", "save", "(", ")", "if", "not", "self", ".", "term", ".", "loader", ".", "exception", ":", "data", "[", "'saved'", "]", "=", "True", "else", ":", "with", "self", ".", "term", ".", "loader", "(", "'Unsaving'", ")", ":", "data", "[", "'object'", "]", ".", "unsave", "(", ")", "if", "not", "self", ".", "term", ".", "loader", ".", "exception", ":", "data", "[", "'saved'", "]", "=", "False" ]
This module will parse the config file within params and set up the variables that will be passed to the various tools in the pipeline.
def parse_config_file(job, config_file):
    """Parse the pipeline config file and schedule one job per patient.

    :param job: Toil job (used for logging and adding follow-on jobs).
    :param config_file: path to the config file; made absolute before use.
    :raises ParameterError: on a missing file, a patient group without a
        ``patient_id``, missing universal options, or missing tool groups.
    :returns: None; work is scheduled via ``job.addFollowOnJobFn``.
    """
    job.fileStore.logToMaster('Parsing config file')
    config_file = os.path.abspath(config_file)
    if not os.path.exists(config_file):
        raise ParameterError(
            'The config file was not found at specified location. Please '
            'verify ' + 'and retry.')
    # Initialize variables to hold the sample sets, the universal options,
    # and the per-tool options
    sample_set = defaultdict()
    univ_options = defaultdict()
    tool_options = defaultdict()
    # Read through the notes and the
    with open(config_file, 'r') as conf:
        # Skip header comments/blank lines until the BEGIN marker.
        for line in conf:
            line = line.strip()
            if line.startswith('##') or len(line) == 0:
                continue
            if line.startswith('BEGIN'):
                break
        # The generator function tool_specific_param_generator will yield one
        # group name at a time along with it's parameters.
        for groupname, group_params in tool_specific_param_generator(job,
                                                                     conf):
            if groupname == 'patient':
                if 'patient_id' not in group_params.keys():
                    raise ParameterError(
                        'A patient group is missing the patient_id flag.')
                sample_set[group_params['patient_id']] = group_params
            elif groupname == 'Universal_Options':
                univ_options = group_params
                required_options = {'java_Xmx', 'output_folder',
                                    'storage_location'}
                missing_opts = required_options.difference(
                    set(univ_options.keys()))
                if len(missing_opts) > 0:
                    raise ParameterError(
                        ' The following options have no arguments in the '
                        'config ' 'file :\n' + '\n'.join(missing_opts))
                # sse_key_is_master arrives as a string; normalize to bool.
                if univ_options['sse_key_is_master']:
                    assert univ_options['sse_key_is_master'] in \
                        ('True', 'true', 'False', 'false')
                    univ_options['sse_key_is_master'] = \
                        univ_options['sse_key_is_master'] in ('True', 'true')
            # If it isn't any of the above, it's a tool group
            else:
                tool_options[groupname] = group_params
    # Ensure that all tools have been provided options.
    required_tools = {'cutadapt', 'bwa', 'star', 'phlat', 'transgene',
                      'mut_callers', 'rsem', 'mhci', 'mhcii', 'snpeff',
                      'rank_boost'}  # 'fusion', 'indels'}
    missing_tools = required_tools.difference(set(tool_options.keys()))
    if len(missing_tools) > 0:
        raise ParameterError(
            ' The following tools have no arguments in the config file : \n' +
            '\n'.join(missing_tools))
    # Start a job for each sample in the sample set
    for patient_id in sample_set.keys():
        job.addFollowOnJobFn(pipeline_launchpad, sample_set[patient_id],
                             univ_options, tool_options)
    return None
2,818
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L46-L109
[ "def", "unshare", "(", "flags", ")", ":", "res", "=", "lib", ".", "unshare", "(", "flags", ")", "if", "res", "!=", "0", ":", "_check_error", "(", "ffi", ".", "errno", ")" ]
This module runs cutadapt on the input RNA fastq files and then calls the RNA aligners .
def run_cutadapt(job, fastqs, univ_options, cutadapt_options):
    """Run cutadapt on the tumor RNA fastq pair.

    :param job: Toil job providing the file store and temp dir.
    :param fastqs: dict with 'tumor_rna' (pair of fastq file-store IDs) and
        'gzipped' flag.
    :param univ_options: dict with at least 'patient' and 'dockerhub'.
    :param cutadapt_options: dict with 'a' and 'A' adapter sequences.
    :returns: dict mapping trimmed fastq names to file-store IDs.
    """
    job.fileStore.logToMaster(
        'Running cutadapt on %s' % univ_options['patient'])
    work_dir = job.fileStore.getLocalTempDir()
    fq_extn = '.gz' if fastqs['gzipped'] else ''
    input_files = {
        'rna_1.fastq' + fq_extn: fastqs['tumor_rna'][0],
        'rna_2.fastq' + fq_extn: fastqs['tumor_rna'][1]}
    input_files = get_files_from_filestore(job, input_files, work_dir,
                                           docker=True)
    parameters = ['-a', cutadapt_options['a'],  # Fwd read 3' adapter
                  '-A', cutadapt_options['A'],  # Rev read 3' adapter
                  '-m', '35',  # Minimum size of read
                  '-o', docker_path('rna_cutadapt_1.fastq'),  # Output for R1
                  '-p', docker_path('rna_cutadapt_2.fastq'),  # Output for R2
                  input_files['rna_1.fastq'],
                  input_files['rna_2.fastq']]
    docker_call(tool='cutadapt', tool_parameters=parameters,
                work_dir=work_dir, dockerhub=univ_options['dockerhub'])
    # Export the trimmed fastqs back into the job store.
    output_files = defaultdict()
    for fastq_file in ['rna_cutadapt_1.fastq', 'rna_cutadapt_2.fastq']:
        output_files[fastq_file] = job.fileStore.writeGlobalFile(
            '/'.join([work_dir, fastq_file]))
    return output_files
2,819
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L256-L298
[ "def", "stations", "(", "self", ",", "*", ",", "generated", "=", "True", ",", "library", "=", "True", ")", ":", "station_list", "=", "[", "]", "for", "chunk", "in", "self", ".", "stations_iter", "(", "page_size", "=", "49995", ")", ":", "for", "station", "in", "chunk", ":", "if", "(", "(", "generated", "and", "not", "station", ".", "get", "(", "'inLibrary'", ")", ")", "or", "(", "library", "and", "station", ".", "get", "(", "'inLibrary'", ")", ")", ")", ":", "station_list", ".", "append", "(", "station", ")", "return", "station_list" ]
This module uses STAR to align the RNA fastqs to the reference
def run_star(job, fastqs, univ_options, star_options):
    """Align the cutadapt-trimmed RNA fastqs to the reference with STAR.

    :param job: Toil job providing the file store and temp dir.
    :param fastqs: dict with 'rna_cutadapt_1.fastq' / 'rna_cutadapt_2.fastq'
        file-store IDs.
    :param univ_options: dict with at least 'patient' and 'dockerhub'.
    :param star_options: dict with 'type' ('star' or 'starlong'), 'n'
        (thread count) and 'index_tar'.
    :returns: dict of output BAM names to file-store IDs; the sorted BAM's
        value is the promise of the indexing child job.
    """
    assert star_options['type'] in ('star', 'starlong')
    job.fileStore.logToMaster('Running STAR on %s' % univ_options['patient'])
    work_dir = job.fileStore.getLocalTempDir()
    input_files = {
        'rna_cutadapt_1.fastq': fastqs['rna_cutadapt_1.fastq'],
        'rna_cutadapt_2.fastq': fastqs['rna_cutadapt_2.fastq'],
        'star_index.tar.gz': star_options['index_tar']}
    input_files = get_files_from_filestore(job, input_files, work_dir,
                                           docker=True)
    parameters = ['--runThreadN', str(star_options['n']),
                  '--genomeDir', input_files['star_index'],
                  '--outFileNamePrefix', 'rna',
                  '--readFilesIn',
                  input_files['rna_cutadapt_1.fastq'],
                  input_files['rna_cutadapt_2.fastq'],
                  '--outSAMattributes', 'NH', 'HI', 'AS', 'NM', 'MD',
                  '--outSAMtype', 'BAM', 'SortedByCoordinate',
                  '--quantMode', 'TranscriptomeSAM',
                  '--outSAMunmapped', 'Within']
    # 'starlong' handles long reads; same parameters, different tool image.
    if star_options['type'] == 'star':
        docker_call(tool='star', tool_parameters=parameters,
                    work_dir=work_dir, dockerhub=univ_options['dockerhub'])
    else:
        docker_call(tool='starlong', tool_parameters=parameters,
                    work_dir=work_dir, dockerhub=univ_options['dockerhub'])
    output_files = defaultdict()
    for bam_file in ['rnaAligned.toTranscriptome.out.bam',
                     'rnaAligned.sortedByCoord.out.bam']:
        output_files[bam_file] = job.fileStore.writeGlobalFile(
            '/'.join([work_dir, bam_file]))
    # The trimmed fastqs are no longer needed downstream.
    job.fileStore.deleteGlobalFile(fastqs['rna_cutadapt_1.fastq'])
    job.fileStore.deleteGlobalFile(fastqs['rna_cutadapt_2.fastq'])
    index_star = job.wrapJobFn(
        index_bamfile,
        output_files['rnaAligned.sortedByCoord.out.bam'],
        'rna', univ_options, disk='120G')
    job.addChild(index_star)
    output_files['rnaAligned.sortedByCoord.out.bam'] = index_star.rv()
    return output_files
2,820
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L301-L361
[ "def", "_CountClientStatisticByLabel", "(", "self", ",", "day_buckets", ",", "extract_statistic_fn", ")", ":", "counts", "=", "collections", ".", "defaultdict", "(", "int", ")", "now", "=", "rdfvalue", ".", "RDFDatetime", ".", "Now", "(", ")", "for", "info", "in", "self", ".", "IterateAllClientsFullInfo", "(", "batch_size", "=", "db", ".", "MAX_COUNT", ")", ":", "if", "not", "info", ".", "metadata", ".", "ping", ":", "continue", "statistic_value", "=", "extract_statistic_fn", "(", "info", ")", "for", "client_label", "in", "info", ".", "GetLabelsNames", "(", "owner", "=", "\"GRR\"", ")", ":", "for", "day_bucket", "in", "day_buckets", ":", "time_boundary", "=", "now", "-", "rdfvalue", ".", "Duration", ".", "FromDays", "(", "day_bucket", ")", "if", "info", ".", "metadata", ".", "ping", ">", "time_boundary", ":", "# Count the client if it has been active in the last 'day_bucket'", "# days.", "counts", "[", "(", "statistic_value", ",", "client_label", ",", "day_bucket", ")", "]", "+=", "1", "return", "dict", "(", "counts", ")" ]
This module aligns the SAMPLE_TYPE dna fastqs to the reference
def run_bwa(job, fastqs, sample_type, univ_options, bwa_options):
    """
    Align the SAMPLE_TYPE DNA fastq pair to hg19 with bwa mem, then chain a
    child job that converts the SAM to BAM.

    :param job: Toil job.
    :param fastqs: Dict with fastqs[sample_type] = (fq1_jsid, fq2_jsid) and a
        'gzipped' flag.
    :param sample_type: Sample label (used in filenames and log lines).
    :param univ_options: Dict of universal options ('patient', 'dockerhub').
    :param bwa_options: Dict with 'n' (threads) and 'index_tar' (bwa index
        tarball jobStoreID).
    :return: Promise of the downstream processing chain (see comment at end).
    """
    job.fileStore.logToMaster('Running bwa on %s:%s' % (univ_options['patient'], sample_type))
    work_dir = job.fileStore.getLocalTempDir()
    fq_extn = '.gz' if fastqs['gzipped'] else ''
    input_files = {
        'dna_1.fastq' + fq_extn: fastqs[sample_type][0],
        'dna_2.fastq' + fq_extn: fastqs[sample_type][1],
        'bwa_index.tar.gz': bwa_options['index_tar']}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=True)
    parameters = ['mem',
                  '-t', str(bwa_options['n']),
                  '-v', '1',  # Don't print INFO messages to the stderr
                  '/'.join([input_files['bwa_index'], 'hg19.fa']),
                  input_files['dna_1.fastq'],
                  input_files['dna_2.fastq']]
    with open(''.join([work_dir, '/', sample_type, '_aligned.sam']), 'w') as samfile:
        docker_call(tool='bwa', tool_parameters=parameters, work_dir=work_dir,
                    dockerhub=univ_options['dockerhub'], outfile=samfile)
    # samfile.name retains the path info
    output_file = job.fileStore.writeGlobalFile(samfile.name)
    samfile_processing = job.wrapJobFn(bam_conversion, output_file, sample_type,
                                       univ_options, disk='60G')
    job.addChild(samfile_processing)
    # Return values get passed up the chain to here. The return value will be a dict with
    # SAMPLE_TYPE_fix_pg_sorted.bam: jobStoreID
    # SAMPLE_TYPE_fix_pg_sorted.bam.bai: jobStoreID
    return samfile_processing.rv()
2,821
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L364-L414
[ "def", "get_changed_devices", "(", "self", ",", "timestamp", ")", ":", "if", "timestamp", "is", "None", ":", "payload", "=", "{", "}", "else", ":", "payload", "=", "{", "'timeout'", ":", "SUBSCRIPTION_WAIT", ",", "'minimumdelay'", ":", "SUBSCRIPTION_MIN_WAIT", "}", "payload", ".", "update", "(", "timestamp", ")", "# double the timeout here so requests doesn't timeout before vera", "payload", ".", "update", "(", "{", "'id'", ":", "'lu_sdata'", ",", "}", ")", "logger", ".", "debug", "(", "\"get_changed_devices() requesting payload %s\"", ",", "str", "(", "payload", ")", ")", "r", "=", "self", ".", "data_request", "(", "payload", ",", "TIMEOUT", "*", "2", ")", "r", ".", "raise_for_status", "(", ")", "# If the Vera disconnects before writing a full response (as lu_sdata", "# will do when interrupted by a Luup reload), the requests module will", "# happily return 200 with an empty string. So, test for empty response,", "# so we don't rely on the JSON parser to throw an exception.", "if", "r", ".", "text", "==", "\"\"", ":", "raise", "PyveraError", "(", "\"Empty response from Vera\"", ")", "# Catch a wide swath of what the JSON parser might throw, within", "# reason. Unfortunately, some parsers don't specifically return", "# json.decode.JSONDecodeError, but so far most seem to derive what", "# they do throw from ValueError, so that's helpful.", "try", ":", "result", "=", "r", ".", "json", "(", ")", "except", "ValueError", "as", "ex", ":", "raise", "PyveraError", "(", "\"JSON decode error: \"", "+", "str", "(", "ex", ")", ")", "if", "not", "(", "type", "(", "result", ")", "is", "dict", "and", "'loadtime'", "in", "result", "and", "'dataversion'", "in", "result", ")", ":", "raise", "PyveraError", "(", "\"Unexpected/garbled response from Vera\"", ")", "# At this point, all good. 
Update timestamp and return change data.", "device_data", "=", "result", ".", "get", "(", "'devices'", ")", "timestamp", "=", "{", "'loadtime'", ":", "result", ".", "get", "(", "'loadtime'", ")", ",", "'dataversion'", ":", "result", ".", "get", "(", "'dataversion'", ")", "}", "return", "[", "device_data", ",", "timestamp", "]" ]
This module converts SAMFILE from sam to bam
def bam_conversion(job, samfile, sample_type, univ_options):
    """
    Convert SAMFILE from SAM to BAM with samtools, delete the SAM from the
    file store, and chain a child job that fixes the BAM header.

    :param job: Toil job.
    :param samfile: jobStoreID of the input SAM.
    :param sample_type: Sample label (used in log lines).
    :param univ_options: Dict of universal options ('patient', 'dockerhub').
    :return: Promise of the reheader child job's result.
    """
    job.fileStore.logToMaster('Running sam2bam on %s:%s' % (univ_options['patient'], sample_type))
    work_dir = job.fileStore.getLocalTempDir()
    # Localize the SAM so the docker container can see it.
    localized = get_files_from_filestore(job, {'aligned.sam': samfile}, work_dir, docker=True)
    out_bam = '/'.join([work_dir, 'aligned.bam'])
    samtools_args = ['view', '-bS', '-o', docker_path(out_bam), localized['aligned.sam']]
    docker_call(tool='samtools', tool_parameters=samtools_args, work_dir=work_dir,
                dockerhub=univ_options['dockerhub'])
    bam_jsid = job.fileStore.writeGlobalFile(out_bam)
    # Once the BAM exists the (much larger) SAM is dead weight.
    job.fileStore.deleteGlobalFile(samfile)
    reheader_job = job.wrapJobFn(fix_bam_header, bam_jsid, sample_type, univ_options, disk='60G')
    job.addChild(reheader_job)
    return reheader_job.rv()
2,822
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L417-L448
[ "def", "line_line_collide", "(", "line1", ",", "line2", ")", ":", "s", ",", "t", ",", "success", "=", "segment_intersection", "(", "line1", "[", ":", ",", "0", "]", ",", "line1", "[", ":", ",", "1", "]", ",", "line2", "[", ":", ",", "0", "]", ",", "line2", "[", ":", ",", "1", "]", ")", "if", "success", ":", "return", "_helpers", ".", "in_interval", "(", "s", ",", "0.0", ",", "1.0", ")", "and", "_helpers", ".", "in_interval", "(", "t", ",", "0.0", ",", "1.0", ")", "else", ":", "disjoint", ",", "_", "=", "parallel_lines_parameters", "(", "line1", "[", ":", ",", "0", "]", ",", "line1", "[", ":", ",", "1", "]", ",", "line2", "[", ":", ",", "0", "]", ",", "line2", "[", ":", ",", "1", "]", ")", "return", "not", "disjoint" ]
This module modified the header in BAMFILE
def fix_bam_header(job, bamfile, sample_type, univ_options):
    """
    Rewrite BAMFILE's header: dump it with `samtools view -H`, drop every
    field starting with 'CL' from @PG lines, then `samtools reheader` the BAM
    with the cleaned header.  Chains a child job that adds read groups.

    :param job: Toil job.
    :param bamfile: jobStoreID of the input BAM.
    :param sample_type: Sample label (used in log lines).
    :param univ_options: Dict of universal options ('patient', 'dockerhub').
    :return: Promise of the add_readgroups child job's result.
    """
    job.fileStore.logToMaster('Running reheader on %s:%s' % (univ_options['patient'], sample_type))
    work_dir = job.fileStore.getLocalTempDir()
    input_files = {'aligned.bam': bamfile}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=True)
    # Step 1: dump the current header to a file.
    parameters = ['view', '-H', input_files['aligned.bam']]
    with open('/'.join([work_dir, 'aligned_bam.header']), 'w') as headerfile:
        docker_call(tool='samtools', tool_parameters=parameters, work_dir=work_dir,
                    dockerhub=univ_options['dockerhub'], outfile=headerfile)
    # Step 2: strip CL* fields from @PG lines (they embed container-local
    # command lines).  Note: `headerfile` is deliberately rebound here from
    # the write handle above to a read handle on the same path.
    with open(headerfile.name, 'r') as headerfile, \
            open('/'.join([work_dir, 'output_bam.header']), 'w') as outheaderfile:
        for line in headerfile:
            if line.startswith('@PG'):
                line = '\t'.join([x for x in line.strip().split('\t')
                                  if not x.startswith('CL')])
            print(line.strip(), file=outheaderfile)
    # Step 3: write a new BAM carrying the cleaned header.
    parameters = ['reheader', docker_path(outheaderfile.name), input_files['aligned.bam']]
    with open('/'.join([work_dir, 'aligned_fixPG.bam']), 'w') as fixpg_bamfile:
        docker_call(tool='samtools', tool_parameters=parameters, work_dir=work_dir,
                    dockerhub=univ_options['dockerhub'], outfile=fixpg_bamfile)
    output_file = job.fileStore.writeGlobalFile(fixpg_bamfile.name)
    # The un-reheadered BAM is no longer needed.
    job.fileStore.deleteGlobalFile(bamfile)
    add_rg = job.wrapJobFn(add_readgroups, output_file, sample_type, univ_options, disk='60G')
    job.addChild(add_rg)
    return add_rg.rv()
2,823
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L451-L491
[ "def", "lv_grid_generators_bus_bar", "(", "nd", ")", ":", "lv_stats", "=", "{", "}", "for", "la", "in", "nd", ".", "_mv_grid_districts", "[", "0", "]", ".", "lv_load_areas", "(", ")", ":", "for", "lvgd", "in", "la", ".", "lv_grid_districts", "(", ")", ":", "station_neighbors", "=", "list", "(", "lvgd", ".", "lv_grid", ".", "_graph", "[", "lvgd", ".", "lv_grid", ".", "_station", "]", ".", "keys", "(", ")", ")", "# check if nodes of a statio are members of list generators", "station_generators", "=", "[", "x", "for", "x", "in", "station_neighbors", "if", "x", "in", "lvgd", ".", "lv_grid", ".", "generators", "(", ")", "]", "lv_stats", "[", "repr", "(", "lvgd", ".", "lv_grid", ".", "_station", ")", "]", "=", "station_generators", "return", "lv_stats" ]
This module will run rsem on the RNA Bam file .
def run_rsem(job, star_bams, univ_options, rsem_options):
    """
    Run RSEM on the STAR transcriptome BAM and export the per-isoform
    expression results.

    :param job: Toil job.
    :param star_bams: Dict of STAR outputs; 'rnaAligned.toTranscriptome.out.bam'
        is consumed here.
    :param univ_options: Dict of universal options ('patient', 'dockerhub').
    :param rsem_options: Dict with 'n' (threads) and 'index_tar' (RSEM index
        tarball jobStoreID).
    :return: jobStoreID of rsem.isoforms.results.
    """
    # NOTE(review): the log message says 'rsem index' but this step runs
    # quantification — confirm before changing the (grep-able) string.
    job.fileStore.logToMaster('Running rsem index on %s' % univ_options['patient'])
    work_dir = job.fileStore.getLocalTempDir()
    to_fetch = {'star_transcriptome.bam': star_bams['rnaAligned.toTranscriptome.out.bam'],
                'rsem_index.tar.gz': rsem_options['index_tar']}
    local = get_files_from_filestore(job, to_fetch, work_dir, docker=True)
    rsem_args = [
        '--paired-end',
        '-p', str(rsem_options['n']),
        '--bam', local['star_transcriptome.bam'],
        '--no-bam-output',
        '/'.join([local['rsem_index'], 'hg19']),
        'rsem',
    ]
    docker_call(tool='rsem', tool_parameters=rsem_args, work_dir=work_dir,
                dockerhub=univ_options['dockerhub'])
    return job.fileStore.writeGlobalFile('/'.join([work_dir, 'rsem.isoforms.results']))
2,824
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L565-L603
[ "def", "_timestamp_zero_start_handler", "(", "c", ",", "ctx", ")", ":", "val", "=", "ctx", ".", "value", "ctx", ".", "set_ion_type", "(", "IonType", ".", "TIMESTAMP", ")", "if", "val", "[", "0", "]", "==", "_MINUS", ":", "_illegal_character", "(", "c", ",", "ctx", ",", "'Negative year not allowed.'", ")", "val", ".", "append", "(", "c", ")", "c", ",", "self", "=", "yield", "trans", "=", "ctx", ".", "immediate_transition", "(", "self", ")", "while", "True", ":", "if", "c", "in", "_TIMESTAMP_YEAR_DELIMITERS", ":", "trans", "=", "ctx", ".", "immediate_transition", "(", "_timestamp_handler", "(", "c", ",", "ctx", ")", ")", "elif", "c", "in", "_DIGITS", ":", "val", ".", "append", "(", "c", ")", "else", ":", "_illegal_character", "(", "c", ",", "ctx", ")", "c", ",", "_", "=", "yield", "trans" ]
This module will merge the per-chromosome radia files created by spawn_radia into a genome vcf. It will make 2 vcfs: one for PASSing non-germline calls and one for all calls.
def merge_radia(job, perchrom_rvs):
    """
    Merge the per-chromosome radia files created by spawn_radia into a genome
    vcf.  Produces two vcfs: one with all calls and one restricted to PASSing
    non-germline calls (the latter is then re-parsed for multi-alt alleles).

    :param job: Toil job.
    :param perchrom_rvs: Dict of per-chromosome dicts mapping filenames to
        jobStoreIDs (the return values of the per-chromosome radia jobs).
    :return: Dict mapping 'radia_calls.vcf' and
        'radia_parsed_filter_passing_calls.vcf' to jobStoreIDs.
    """
    job.fileStore.logToMaster('Running merge_radia')
    work_dir = job.fileStore.getLocalTempDir()
    # We need to squash the input dict of dicts to a single dict such that it
    # can be passed to get_files_from_filestore
    input_files = {filename: jsid for perchrom_files in perchrom_rvs.values()
                   for filename, jsid in perchrom_files.items()}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=False)
    chromosomes = [''.join(['chr', str(x)]) for x in range(1, 23) + ['X', 'Y']]
    with open('/'.join([work_dir, 'radia_calls.vcf']), 'w') as radfile, \
            open('/'.join([work_dir, 'radia_filter_passing_calls.vcf']), 'w') as radpassfile:
        for chrom in chromosomes:
            with open(input_files[''.join(['radia_filtered_', chrom, '.vcf'])],
                      'r') as filtradfile:
                for line in filtradfile:
                    line = line.strip()
                    if line.startswith('#'):
                        # Emit the vcf header only once (from chr1's file).
                        if chrom == 'chr1':
                            print(line, file=radfile)
                            print(line, file=radpassfile)
                        continue
                    else:
                        print(line, file=radfile)
                        line = line.split('\t')
                        # Keep only PASSing, non-germline calls in the
                        # filtered vcf (FILTER is column 7, INFO column 8).
                        if line[6] == 'PASS' and 'MT=GERM' not in line[7]:
                            print('\t'.join(line), file=radpassfile)
    # parse the PASS radia vcf for multiple alt alleles
    with open(radpassfile.name, 'r') as radpassfile, \
            open('/'.join([work_dir, 'radia_parsed_filter_passing_calls.vcf']),
                 'w') as parsedradfile:
        parse_radia_multi_alt(radpassfile, parsedradfile)
    output_files = defaultdict()
    for radia_file in [radfile.name, parsedradfile.name]:
        output_files[os.path.basename(radia_file)] = job.fileStore.writeGlobalFile(radia_file)
    return output_files
2,825
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L738-L787
[ "def", "byte_bounds_offset", "(", "self", ")", ":", "if", "self", ".", "data", ".", "base", "is", "None", ":", "if", "self", ".", "is_indexed", ":", "basearray", "=", "self", ".", "data", ".", "np_data", "else", ":", "basearray", "=", "self", ".", "data", "return", "0", ",", "len", "(", "basearray", ")", "return", "int", "(", "self", ".", "data_start", "-", "self", ".", "base_start", ")", ",", "int", "(", "self", ".", "data_end", "-", "self", ".", "base_start", ")" ]
This module will run radia on the RNA and DNA bams
def run_radia(job, bams, univ_options, radia_options, chrom):
    """
    Run radia on one chromosome using the tumor RNA, tumor DNA and normal DNA
    BAMs, then chain a child job that filters the calls.

    :param job: Toil job.
    :param bams: Dict of BAM/BAI jobStoreIDs keyed 'tumor_rna', 'tumor_rnai',
        'tumor_dna', 'tumor_dnai', 'normal_dna', 'normal_dnai'.
    :param univ_options: Dict of universal options ('patient', 'dockerhub').
    :param radia_options: Dict with 'genome_fasta' and 'genome_fai'.
    :param chrom: Chromosome name (e.g. 'chr1').
    :return: Promise of the filter-radia child job's result.
    """
    job.fileStore.logToMaster('Running radia on %s:%s' % (univ_options['patient'], chrom))
    work_dir = job.fileStore.getLocalTempDir()
    input_files = {
        'rna.bam': bams['tumor_rna'],
        'rna.bam.bai': bams['tumor_rnai'],
        'tumor.bam': bams['tumor_dna'],
        'tumor.bam.bai': bams['tumor_dnai'],
        'normal.bam': bams['normal_dna'],
        'normal.bam.bai': bams['normal_dnai'],
        'genome.fasta': radia_options['genome_fasta'],
        'genome.fasta.fai': radia_options['genome_fai']}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=True)
    radia_output = ''.join([work_dir, '/radia_', chrom, '.vcf'])
    radia_log = ''.join([work_dir, '/radia_', chrom, '_radia.log'])
    parameters = [univ_options['patient'],  # shortID
                  chrom,
                  '-n', input_files['normal.bam'],
                  '-t', input_files['tumor.bam'],
                  '-r', input_files['rna.bam'],
                  ''.join(['--rnaTumorFasta=', input_files['genome.fasta']]),
                  '-f', input_files['genome.fasta'],
                  '-o', docker_path(radia_output),
                  '-i', 'hg19_M_rCRS',
                  '-m', input_files['genome.fasta'],
                  '-d', 'aarjunrao@soe.ucsc.edu',
                  '-q', 'Illumina',
                  '--disease', 'CANCER',
                  '-l', 'INFO',
                  '-g', docker_path(radia_log)]
    docker_call(tool='radia', tool_parameters=parameters, work_dir=work_dir,
                dockerhub=univ_options['dockerhub'])
    output_files = defaultdict()
    for radia_file in [radia_output, radia_log]:
        output_files[os.path.basename(radia_file)] = job.fileStore.writeGlobalFile(radia_file)
    filterradia = job.wrapJobFn(run_filter_radia, bams,
                                output_files[os.path.basename(radia_output)],
                                univ_options, radia_options, chrom,
                                disk='60G', memory='6G')
    job.addChild(filterradia)
    return filterradia.rv()
2,826
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L790-L857
[ "def", "uniform_partition_fromintv", "(", "intv_prod", ",", "shape", ",", "nodes_on_bdry", "=", "False", ")", ":", "grid", "=", "uniform_grid_fromintv", "(", "intv_prod", ",", "shape", ",", "nodes_on_bdry", "=", "nodes_on_bdry", ")", "return", "RectPartition", "(", "intv_prod", ",", "grid", ")" ]
This module will run filterradia on the RNA and DNA bams .
def run_filter_radia(job, bams, radia_file, univ_options, radia_options, chrom):
    """
    Run filterradia on the per-chromosome radia calls.

    :param job: Toil job.
    :param bams: Dict of BAM/BAI jobStoreIDs (same keys as run_radia).
    :param radia_file: jobStoreID of the unfiltered radia vcf for this chrom.
    :param univ_options: Dict of universal options ('patient', 'dockerhub').
    :param radia_options: Dict with 'genome_fasta' and 'genome_fai'.
    :param chrom: Chromosome name (e.g. 'chr1').
    :return: Dict mapping the filtered vcf name and the log name to
        jobStoreIDs.
    """
    job.fileStore.logToMaster('Running filter-radia on %s:%s' % (univ_options['patient'], chrom))
    work_dir = job.fileStore.getLocalTempDir()
    input_files = {
        'rna.bam': bams['tumor_rna'],
        'rna.bam.bai': bams['tumor_rnai'],
        'tumor.bam': bams['tumor_dna'],
        'tumor.bam.bai': bams['tumor_dnai'],
        'normal.bam': bams['normal_dna'],
        'normal.bam.bai': bams['normal_dnai'],
        'radia.vcf': radia_file,
        'genome.fasta': radia_options['genome_fasta'],
        'genome.fasta.fai': radia_options['genome_fai']}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=True)
    filterradia_output = ''.join(['radia_filtered_', chrom, '.vcf'])
    filterradia_log = ''.join([work_dir, '/radia_filtered_', chrom, '_radia.log'])
    # NOTE(review): lstrip('chr') strips a character *set*, not the prefix.
    # It happens to be safe for chr1..chr22/X/Y but would misbehave on other
    # names — confirm before reusing with non-standard chromosomes.
    parameters = [univ_options['patient'],  # shortID
                  chrom.lstrip('chr'),
                  input_files['radia.vcf'],
                  '/data',
                  '/home/radia/scripts',
                  '-b', '/home/radia/data/hg19/blacklists/1000Genomes/phase1/',
                  '-d', '/home/radia/data/hg19/snp135',
                  '-r', '/home/radia/data/hg19/retroGenes/',
                  '-p', '/home/radia/data/hg19/pseudoGenes/',
                  '-c', '/home/radia/data/hg19/cosmic/',
                  '-t', '/home/radia/data/hg19/gaf/2_1',
                  '--noSnpEff',
                  '--rnaGeneBlckFile', '/home/radia/data/rnaGeneBlacklist.tab',
                  '--rnaGeneFamilyBlckFile', '/home/radia/data/rnaGeneFamilyBlacklist.tab',
                  '-f', input_files['genome.fasta'],
                  '--log=INFO',
                  '-g', docker_path(filterradia_log)]
    docker_call(tool='filterradia', tool_parameters=parameters,
                work_dir=work_dir, dockerhub=univ_options['dockerhub'])
    output_files = defaultdict()
    # The tool writes PATIENT_CHROM.vcf; we store it under the
    # radia_filtered_CHROM.vcf key that merge_radia expects.
    output_files[filterradia_output] = job.fileStore.writeGlobalFile(
        ''.join([work_dir, '/', univ_options['patient'], '_', chrom, '.vcf']))
    output_files[os.path.basename(filterradia_log)] = \
        job.fileStore.writeGlobalFile(filterradia_log)
    return output_files
2,827
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L860-L921
[ "def", "skip_all", "(", "self", ")", ":", "storage", ",", "streaming", "=", "self", ".", "engine", ".", "count", "(", ")", "if", "self", ".", "selector", ".", "output", ":", "self", ".", "offset", "=", "streaming", "else", ":", "self", ".", "offset", "=", "storage", "self", ".", "_count", "=", "0" ]
This module will merge the per-chromosome mutect files created by spawn_mutect into a genome vcf. It will make 2 vcfs: one for PASSing non-germline calls and one for all calls.
def merge_mutect(job, perchrom_rvs):
    """
    Merge the per-chromosome mutect files created by spawn_mutect into a
    genome vcf.  Writes three files (all calls vcf, call-stats .out, and a
    passing-calls vcf with REJECT lines removed) but only exports the
    passing-calls vcf.

    :param job: Toil job.
    :param perchrom_rvs: Dict of per-chromosome dicts mapping filenames to
        jobStoreIDs (the return values of the per-chromosome mutect jobs).
    :return: jobStoreID of mutect_passing_calls.vcf.
    """
    job.fileStore.logToMaster('Running merge_mutect')
    work_dir = job.fileStore.getLocalTempDir()
    # We need to squash the input dict of dicts to a single dict such that it
    # can be passed to get_files_from_filestore
    input_files = {filename: jsid for perchrom_files in perchrom_rvs.values()
                   for filename, jsid in perchrom_files.items()}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=False)
    chromosomes = [''.join(['chr', str(x)]) for x in range(1, 23) + ['X', 'Y']]
    with open('/'.join([work_dir, 'mutect_calls.vcf']), 'w') as mutvcf, \
            open('/'.join([work_dir, 'mutect_calls.out']), 'w') as mutout, \
            open('/'.join([work_dir, 'mutect_passing_calls.vcf']), 'w') as mutpassvcf:
        out_header_not_printed = True
        for chrom in chromosomes:
            with open(input_files[''.join(['mutect_', chrom, '.vcf'])], 'r') as mutfile:
                for line in mutfile:
                    line = line.strip()
                    if line.startswith('#'):
                        # vcf header is taken from chr1's file only.
                        if chrom == 'chr1':
                            print(line, file=mutvcf)
                            print(line, file=mutpassvcf)
                        continue
                    else:
                        print(line, file=mutvcf)
                        line = line.split('\t')
                        # FILTER is column 7; drop REJECTed calls.
                        if line[6] != 'REJECT':
                            print('\t'.join(line), file=mutpassvcf)
            with open(input_files[''.join(['mutect_', chrom, '.out'])], 'r') as mutfile:
                for line in mutfile:
                    line = line.strip()
                    if line.startswith('#'):
                        if chrom == 'chr1':
                            print(line, file=mutout)
                        continue
                    elif out_header_not_printed:
                        # First non-'#' line is the column header; the flag
                        # records that it has been written once.
                        print(line, file=mutout)
                        out_header_not_printed = False
                    else:
                        # NOTE(review): the else branch also prints every
                        # non-'#' line, so later chromosomes' column headers
                        # are re-emitted into the .out — confirm intended.
                        print(line, file=mutout)
    output_file = job.fileStore.writeGlobalFile(mutpassvcf.name)
    return output_file
2,828
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L973-L1026
[ "def", "iterator", "(", "self", ")", ":", "results", "=", "super", "(", "SearchableQuerySet", ",", "self", ")", ".", "iterator", "(", ")", "if", "self", ".", "_search_terms", "and", "not", "self", ".", "_search_ordered", ":", "results", "=", "list", "(", "results", ")", "for", "i", ",", "result", "in", "enumerate", "(", "results", ")", ":", "count", "=", "0", "related_weights", "=", "[", "]", "for", "(", "field", ",", "weight", ")", "in", "self", ".", "_search_fields", ".", "items", "(", ")", ":", "if", "\"__\"", "in", "field", ":", "related_weights", ".", "append", "(", "weight", ")", "for", "term", "in", "self", ".", "_search_terms", ":", "field_value", "=", "getattr", "(", "result", ",", "field", ",", "None", ")", "if", "field_value", ":", "count", "+=", "field_value", ".", "lower", "(", ")", ".", "count", "(", "term", ")", "*", "weight", "if", "not", "count", "and", "related_weights", ":", "count", "=", "int", "(", "sum", "(", "related_weights", ")", "/", "len", "(", "related_weights", ")", ")", "results", "[", "i", "]", ".", "result_count", "=", "count", "return", "iter", "(", "results", ")", "return", "results" ]
This module will run mutect on the DNA bams
def run_mutect(job, tumor_bam, normal_bam, univ_options, mutect_options, chrom):
    """
    Run MuTect 1.1.7 on one chromosome using the tumor and normal DNA BAMs.

    :param job: Toil job.
    :param tumor_bam: Dict with 'tumor_dna_fix_pg_sorted.bam'/'.bam.bai' jobStoreIDs.
    :param normal_bam: Dict with 'normal_dna_fix_pg_sorted.bam'/'.bam.bai' jobStoreIDs.
    :param univ_options: Dict of universal options ('patient', 'dockerhub',
        'java_Xmx' fallback).
    :param mutect_options: Dict with genome fasta/fai/dict, cosmic and dbsnp
        vcf/idx jobStoreIDs, and an optional 'java_Xmx' override.
    :param chrom: Chromosome name passed to MuTect's -L.
    :return: Dict mapping the per-chromosome .out and .vcf names to jobStoreIDs.
    """
    job.fileStore.logToMaster('Running mutect on %s:%s' % (univ_options['patient'], chrom))
    work_dir = job.fileStore.getLocalTempDir()
    input_files = {
        'tumor.bam': tumor_bam['tumor_dna_fix_pg_sorted.bam'],
        'tumor.bam.bai': tumor_bam['tumor_dna_fix_pg_sorted.bam.bai'],
        'normal.bam': normal_bam['normal_dna_fix_pg_sorted.bam'],
        'normal.bam.bai': normal_bam['normal_dna_fix_pg_sorted.bam.bai'],
        'genome.fa': mutect_options['genome_fasta'],
        'genome.fa.fai': mutect_options['genome_fai'],
        'genome.dict': mutect_options['genome_dict'],
        'cosmic.vcf': mutect_options['cosmic_vcf'],
        'cosmic.vcf.idx': mutect_options['cosmic_idx'],
        'dbsnp.vcf': mutect_options['dbsnp_vcf'],
        'dbsnp.vcf.idx': mutect_options['dbsnp_idx']}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=True)
    mutout = ''.join([work_dir, '/mutect_', chrom, '.out'])
    mutvcf = ''.join([work_dir, '/mutect_', chrom, '.vcf'])
    parameters = ['-R', input_files['genome.fa'],
                  '--cosmic', input_files['cosmic.vcf'],
                  '--dbsnp', input_files['dbsnp.vcf'],
                  '--input_file:normal', input_files['normal.bam'],
                  '--input_file:tumor', input_files['tumor.bam'],
                  # '--tumor_lod', str(10),
                  # '--initial_tumor_lod', str(4.0),
                  '-L', chrom,
                  '--out', docker_path(mutout),
                  '--vcf', docker_path(mutvcf)]
    # Per-tool Xmx overrides the universal default when set.
    Xmx = mutect_options['java_Xmx'] if mutect_options['java_Xmx'] else univ_options['java_Xmx']
    docker_call(tool='mutect:1.1.7', tool_parameters=parameters, work_dir=work_dir,
                dockerhub=univ_options['dockerhub'], java_opts=Xmx)
    output_files = defaultdict()
    for mutect_file in [mutout, mutvcf]:
        output_files[os.path.basename(mutect_file)] = job.fileStore.writeGlobalFile(mutect_file)
    return output_files
2,829
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1029-L1083
[ "def", "_PrintSessionsOverview", "(", "self", ",", "storage_reader", ")", ":", "table_view", "=", "views", ".", "ViewsFactory", ".", "GetTableView", "(", "self", ".", "_views_format_type", ",", "title", "=", "'Sessions'", ")", "for", "session", "in", "storage_reader", ".", "GetSessions", "(", ")", ":", "start_time", "=", "timelib", ".", "Timestamp", ".", "CopyToIsoFormat", "(", "session", ".", "start_time", ")", "session_identifier", "=", "uuid", ".", "UUID", "(", "hex", "=", "session", ".", "identifier", ")", "session_identifier", "=", "'{0!s}'", ".", "format", "(", "session_identifier", ")", "table_view", ".", "AddRow", "(", "[", "session_identifier", ",", "start_time", "]", ")", "table_view", ".", "Write", "(", "self", ".", "_output_writer", ")" ]
This module will run an indel caller on the DNA bams . This module will be implemented in the future .
def run_indel_caller(job, tumor_bam, normal_bam, univ_options, indel_options):
    """
    Placeholder for a future INDEL caller on the DNA BAMs.  Currently it only
    exports an empty temp file so downstream jobs have a jobStoreID to consume.

    :param job: Toil job.
    :param tumor_bam: Unused (reserved for the real implementation).
    :param normal_bam: Unused (reserved for the real implementation).
    :param univ_options: Dict of universal options ('patient' is read).
    :param indel_options: Unused (reserved for the real implementation).
    :return: jobStoreID of the (empty) placeholder file.
    """
    job.fileStore.logToMaster('Running INDEL on %s' % univ_options['patient'])
    placeholder = job.fileStore.getLocalTempFile()
    return job.fileStore.writeGlobalFile(placeholder)
2,830
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1086-L1096
[ "def", "saturation", "(", "self", ",", "value", ")", ":", "value", "=", "clean_float", "(", "value", ")", "if", "value", "is", "None", ":", "return", "try", ":", "unit_moisture_weight", "=", "self", ".", "unit_moist_weight", "-", "self", ".", "unit_dry_weight", "unit_moisture_volume", "=", "unit_moisture_weight", "/", "self", ".", "_pw", "saturation", "=", "unit_moisture_volume", "/", "self", ".", "_calc_unit_void_volume", "(", ")", "if", "saturation", "is", "not", "None", "and", "not", "ct", ".", "isclose", "(", "saturation", ",", "value", ",", "rel_tol", "=", "self", ".", "_tolerance", ")", ":", "raise", "ModelError", "(", "\"New saturation (%.3f) is inconsistent \"", "\"with calculated value (%.3f)\"", "%", "(", "value", ",", "saturation", ")", ")", "except", "TypeError", ":", "pass", "old_value", "=", "self", ".", "saturation", "self", ".", "_saturation", "=", "value", "try", ":", "self", ".", "recompute_all_weights_and_void", "(", ")", "self", ".", "_add_to_stack", "(", "\"saturation\"", ",", "value", ")", "except", "ModelError", "as", "e", ":", "self", ".", "_saturation", "=", "old_value", "raise", "ModelError", "(", "e", ")" ]
This module will run a fusion caller on DNA bams . This module will be implemented in the future .
def run_fusion_caller(job, star_bam, univ_options, fusion_options):
    """
    Placeholder for a future fusion caller on the RNA BAM.  Currently it only
    exports an empty temp file so downstream jobs have a jobStoreID to consume.

    :param job: Toil job.
    :param star_bam: Unused (reserved for the real implementation).
    :param univ_options: Dict of universal options ('patient' is read).
    :param fusion_options: Unused (reserved for the real implementation).
    :return: jobStoreID of the (empty) placeholder file.
    """
    job.fileStore.logToMaster('Running FUSION on %s' % univ_options['patient'])
    placeholder = job.fileStore.getLocalTempFile()
    return job.fileStore.writeGlobalFile(placeholder)
2,831
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1099-L1109
[ "def", "pull_session", "(", "session_id", "=", "None", ",", "url", "=", "'default'", ",", "io_loop", "=", "None", ",", "arguments", "=", "None", ")", ":", "coords", "=", "_SessionCoordinates", "(", "session_id", "=", "session_id", ",", "url", "=", "url", ")", "session", "=", "ClientSession", "(", "session_id", "=", "session_id", ",", "websocket_url", "=", "websocket_url_for_server_url", "(", "coords", ".", "url", ")", ",", "io_loop", "=", "io_loop", ",", "arguments", "=", "arguments", ")", "session", ".", "pull", "(", ")", "return", "session" ]
This module will aggregate all the mutations called in the previous steps and will then call snpeff on the results .
def run_mutation_aggregator(job, fusion_output, radia_output, mutect_output, indel_output,
                            univ_options):
    """
    Aggregate the mutations called by the upstream callers into a single
    merged vcf (radia's header is reused as the merged header) and export it.

    :param job: Toil job.
    :param fusion_output: jobStoreID of the fusion placeholder (currently unused).
    :param radia_output: Dict; 'radia_parsed_filter_passing_calls.vcf' is consumed.
    :param mutect_output: jobStoreID of the mutect passing-calls vcf.
    :param indel_output: jobStoreID of the indel placeholder (currently unused).
    :param univ_options: Dict of universal options ('patient' is read).
    :return: jobStoreID of PATIENT_merged_mutations.vcf.
    """
    job.fileStore.logToMaster('Aggregating mutations for %s' % univ_options['patient'])
    work_dir = job.fileStore.getLocalTempDir()
    input_files = {'mutect.vcf': mutect_output,
                   'radia.vcf': radia_output['radia_parsed_filter_passing_calls.vcf'],
                   'indel.vcf': indel_output,
                   'fusion.vcf': fusion_output}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=False)
    # Modify these once INDELs and Fusions are implemented
    input_files.pop('indel.vcf')
    input_files.pop('fusion.vcf')
    # read files into memory
    vcf_file = defaultdict()
    mutcallers = input_files.keys()
    with open(''.join([work_dir, '/', univ_options['patient'], '_merged_mutations.vcf']),
              'w') as merged_mut_file:
        for mut_caller in mutcallers:
            # BUGFIX: the previous `mut_caller.rstrip('.vcf')` treated '.vcf'
            # as a *character set* and could eat trailing 'v'/'c'/'f'/'.'
            # characters from the caller name itself.  splitext drops exactly
            # the extension.
            caller = os.path.splitext(mut_caller)[0]
            vcf_file[caller] = defaultdict()
            with open(input_files[mut_caller], 'r') as mutfile:
                for line in mutfile:
                    if line.startswith('#'):
                        # Only radia's header is copied into the merged file.
                        if caller == 'radia':
                            print(line.strip(), file=merged_mut_file)
                        continue
                    line = line.strip().split()
                    # Key each call by (CHROM, POS, REF, ALT).
                    vcf_file[caller][(line[0], line[1], line[3], line[4])] = line
    # This method can be changed in the future to incorporate more callers and
    # fancier integration methods
    merge_vcfs(vcf_file, merged_mut_file.name)
    export_results(merged_mut_file.name, univ_options)
    output_file = job.fileStore.writeGlobalFile(merged_mut_file.name)
    return output_file
2,832
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1112-L1161
[ "def", "SetBackingStore", "(", "cls", ",", "backing", ")", ":", "if", "backing", "not", "in", "[", "'json'", ",", "'sqlite'", ",", "'memory'", "]", ":", "raise", "ArgumentError", "(", "\"Unknown backing store type that is not json or sqlite\"", ",", "backing", "=", "backing", ")", "if", "backing", "==", "'json'", ":", "cls", ".", "BackingType", "=", "JSONKVStore", "cls", ".", "BackingFileName", "=", "'component_registry.json'", "elif", "backing", "==", "'memory'", ":", "cls", ".", "BackingType", "=", "InMemoryKVStore", "cls", ".", "BackingFileName", "=", "None", "else", ":", "cls", ".", "BackingType", "=", "SQLiteKVStore", "cls", ".", "BackingFileName", "=", "'component_registry.db'" ]
This module will run snpeff on the aggregated mutation calls . Currently the only mutations called are SNPs hence SnpEff suffices . This node will be replaced in the future with another translator .
def run_snpeff(job, merged_mutation_file, univ_options, snpeff_options):
    """
    Annotate the aggregated mutation calls with SnpEff.  Currently only SNVs
    are called, so SnpEff suffices; this node may be replaced with another
    translator later.

    :param job: Toil job.
    :param merged_mutation_file: jobStoreID of the merged mutations vcf.
    :param univ_options: Dict of universal options ('patient', 'dockerhub',
        'java_Xmx' fallback).
    :param snpeff_options: Dict with 'index_tar' and optional 'java_Xmx'.
    :return: jobStoreID of snpeffed_mutations.vcf.
    """
    job.fileStore.logToMaster('Running snpeff on %s' % univ_options['patient'])
    work_dir = job.fileStore.getLocalTempDir()
    fetched = get_files_from_filestore(
        job,
        {'merged_mutations.vcf': merged_mutation_file,
         'snpeff_index.tar.gz': snpeff_options['index_tar']},
        work_dir, docker=True)
    snpeff_args = [
        'eff',
        '-dataDir', fetched['snpeff_index'],
        '-c', '/'.join([fetched['snpeff_index'], 'snpEff_hg19_gencode.config']),
        '-no-intergenic',
        '-no-downstream',
        '-no-upstream',
        # '-canon',  # deliberately disabled
        '-noStats',
        'hg19_gencode',
        fetched['merged_mutations.vcf'],
    ]
    # Per-tool Xmx overrides the universal default when set.
    java_xmx = snpeff_options['java_Xmx'] if snpeff_options['java_Xmx'] \
        else univ_options['java_Xmx']
    annotated_path = '/'.join([work_dir, 'snpeffed_mutations.vcf'])
    with open(annotated_path, 'w') as snpeff_file:
        docker_call(tool='snpeff', tool_parameters=snpeff_args, work_dir=work_dir,
                    dockerhub=univ_options['dockerhub'], java_opts=java_xmx,
                    outfile=snpeff_file)
    return job.fileStore.writeGlobalFile(snpeff_file.name)
2,833
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1164-L1205
[ "def", "find_stream", "(", "cls", ",", "fileobj", ",", "max_bytes", ")", ":", "r", "=", "BitReader", "(", "fileobj", ")", "stream", "=", "cls", "(", "r", ")", "if", "stream", ".", "sync", "(", "max_bytes", ")", ":", "stream", ".", "offset", "=", "(", "r", ".", "get_position", "(", ")", "-", "12", ")", "//", "8", "return", "stream" ]
This module will run transgene on the input vcf file from the aggregator and produce the peptides for MHC prediction
def run_transgene(job, snpeffed_file, univ_options, transgene_options):
    """
    Run transgene on the snpeffed vcf to produce mutant peptide fastas (and
    their map files) for MHC binding prediction, at lengths 9, 10 and 15.

    :param job: Toil job.
    :param snpeffed_file: jobStoreID of the snpeffed mutations vcf.
    :param univ_options: Dict of universal options ('patient', 'dockerhub').
    :param transgene_options: Dict with 'gencode_peptide_fasta'.
    :return: Dict mapping each transgened_tumor_*_mer_snpeffed.faa[.map]
        filename to its jobStoreID.
    """
    job.fileStore.logToMaster('Running transgene on %s' % univ_options['patient'])
    work_dir = job.fileStore.getLocalTempDir()
    fetched = get_files_from_filestore(
        job,
        {'snpeffed_muts.vcf': snpeffed_file,
         'pepts.fa': transgene_options['gencode_peptide_fasta']},
        work_dir, docker=True)
    transgene_args = ['--peptides', fetched['pepts.fa'],
                      '--snpeff', fetched['snpeffed_muts.vcf'],
                      '--prefix', 'transgened',
                      '--pep_lens', '9,10,15']
    docker_call(tool='transgene', tool_parameters=transgene_args, work_dir=work_dir,
                dockerhub=univ_options['dockerhub'])
    out_jsids = defaultdict()
    for peplen in ('9', '10', '15'):
        # Each length yields a peptide fasta and its accompanying map file.
        for suffix in ('mer_snpeffed.faa', 'mer_snpeffed.faa.map'):
            fname = '_'.join(['transgened_tumor', peplen, suffix])
            out_jsids[fname] = job.fileStore.writeGlobalFile(os.path.join(work_dir, fname))
    return out_jsids
2,834
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1208-L1249
[ "def", "shared_dataset_ids", "(", "self", ")", ":", "shared_ids", "=", "set", "(", "self", ".", "scenes", "[", "0", "]", ".", "keys", "(", ")", ")", "for", "scene", "in", "self", ".", "scenes", "[", "1", ":", "]", ":", "shared_ids", "&=", "set", "(", "scene", ".", "keys", "(", ")", ")", "return", "shared_ids" ]
This module will run PHLAT on SAMPLE_TYPE fastqs .
def run_phlat ( job , fastqs , sample_type , univ_options , phlat_options ) : job . fileStore . logToMaster ( 'Running phlat on %s:%s' % ( univ_options [ 'patient' ] , sample_type ) ) work_dir = job . fileStore . getLocalTempDir ( ) fq_extn = '.gz' if fastqs [ 'gzipped' ] else '' input_files = { 'input_1.fastq' + fq_extn : fastqs [ sample_type ] [ 0 ] , 'input_2.fastq' + fq_extn : fastqs [ sample_type ] [ 1 ] , 'phlat_index.tar.gz' : phlat_options [ 'index_tar' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = True ) parameters = [ '-1' , input_files [ 'input_1.fastq' ] , '-2' , input_files [ 'input_2.fastq' ] , '-index' , input_files [ 'phlat_index' ] , '-b2url' , '/usr/local/bin/bowtie2' , '-tag' , sample_type , '-e' , '/home/phlat-1.0' , # Phlat directory home '-o' , '/data' , # Output directory '-p' , str ( phlat_options [ 'n' ] ) ] # Number of threads docker_call ( tool = 'phlat' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] ) output_file = job . fileStore . writeGlobalFile ( '' . join ( [ work_dir , '/' , sample_type , '_HLA.sum' ] ) ) return output_file
2,835
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1252-L1294
[ "def", "end_stream", "(", "self", ",", "stream_id", ")", ":", "with", "(", "yield", "from", "self", ".", "_get_stream", "(", "stream_id", ")", ".", "wlock", ")", ":", "yield", "from", "self", ".", "_resumed", ".", "wait", "(", ")", "self", ".", "_conn", ".", "end_stream", "(", "stream_id", ")", "self", ".", "_flush", "(", ")" ]
This module will merge the results form running PHLAT on the 3 input fastq pairs .
def merge_phlat_calls ( job , tumor_phlat , normal_phlat , rna_phlat ) : job . fileStore . logToMaster ( 'Merging Phlat calls' ) work_dir = job . fileStore . getLocalTempDir ( ) input_files = { 'tumor_dna' : tumor_phlat , 'normal_dna' : normal_phlat , 'tumor_rna' : rna_phlat } input_files = get_files_from_filestore ( job , input_files , work_dir ) with open ( input_files [ 'tumor_dna' ] , 'r' ) as td_file , open ( input_files [ 'normal_dna' ] , 'r' ) as nd_file , open ( input_files [ 'tumor_rna' ] , 'r' ) as tr_file : # TODO: Could this be a defautdict? mhc_alleles = { 'HLA_A' : [ ] , 'HLA_B' : [ ] , 'HLA_C' : [ ] , 'HLA_DPA' : [ ] , 'HLA_DQA' : [ ] , 'HLA_DPB' : [ ] , 'HLA_DQB' : [ ] , 'HLA_DRB' : [ ] } for phlatfile in td_file , nd_file , tr_file : mhc_alleles = parse_phlat_file ( phlatfile , mhc_alleles ) # Get most probable alleles for each allele group and print to output with open ( os . path . join ( work_dir , 'mhci_alleles.list' ) , 'w' ) as mhci_file , open ( os . path . join ( work_dir , 'mhcii_alleles.list' ) , 'w' ) as mhcii_file : for mhci_group in [ 'HLA_A' , 'HLA_B' , 'HLA_C' ] : mpa = most_probable_alleles ( mhc_alleles [ mhci_group ] ) print ( '\n' . join ( [ '' . join ( [ 'HLA-' , x ] ) for x in mpa ] ) , file = mhci_file ) drb_mpa = most_probable_alleles ( mhc_alleles [ 'HLA_DRB' ] ) print ( '\n' . join ( [ '' . join ( [ 'HLA-' , x ] ) for x in drb_mpa ] ) , file = mhcii_file ) dqa_mpa = most_probable_alleles ( mhc_alleles [ 'HLA_DQA' ] ) dqb_mpa = most_probable_alleles ( mhc_alleles [ 'HLA_DQB' ] ) for dqa_allele in dqa_mpa : for dqb_allele in dqb_mpa : print ( '' . join ( [ 'HLA-' , dqa_allele , '/' , dqb_allele ] ) , file = mhcii_file ) output_files = defaultdict ( ) for allele_file in [ 'mhci_alleles.list' , 'mhcii_alleles.list' ] : output_files [ allele_file ] = job . fileStore . writeGlobalFile ( os . path . join ( work_dir , allele_file ) ) return output_files
2,836
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1297-L1347
[ "def", "before_websocket", "(", "self", ",", "func", ":", "Callable", ")", "->", "Callable", ":", "self", ".", "record_once", "(", "lambda", "state", ":", "state", ".", "app", ".", "before_websocket", "(", "func", ",", "self", ".", "name", ")", ")", "return", "func" ]
This is the final module in the pipeline . It will call the rank boosting R script .
def boost_ranks ( job , isoform_expression , merged_mhc_calls , transgene_out , univ_options , rank_boost_options ) : job . fileStore . logToMaster ( 'Running boost_ranks on %s' % univ_options [ 'patient' ] ) work_dir = os . path . join ( job . fileStore . getLocalTempDir ( ) , univ_options [ 'patient' ] ) os . mkdir ( work_dir ) input_files = { 'rsem_quant.tsv' : isoform_expression , 'mhci_merged_files.tsv' : merged_mhc_calls [ 'mhci_merged_files.list' ] , 'mhcii_merged_files.tsv' : merged_mhc_calls [ 'mhcii_merged_files.list' ] , 'mhci_peptides.faa' : transgene_out [ 'transgened_tumor_10_mer_snpeffed.faa' ] , 'mhcii_peptides.faa' : transgene_out [ 'transgened_tumor_15_mer_snpeffed.faa' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = True ) output_files = { } for mhc in ( 'mhci' , 'mhcii' ) : parameters = [ mhc , input_files [ '' . join ( [ mhc , '_merged_files.tsv' ] ) ] , input_files [ 'rsem_quant.tsv' ] , input_files [ '' . join ( [ mhc , '_peptides.faa' ] ) ] , rank_boost_options [ '' . join ( [ mhc , '_combo' ] ) ] ] docker_call ( tool = 'rankboost' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] ) output_files [ mhc ] = { '' . join ( [ mhc , '_concise_results.tsv' ] ) : job . fileStore . writeGlobalFile ( '' . join ( [ work_dir , '/' , mhc , '_merged_files_concise_results.tsv' ] ) ) , '' . join ( [ mhc , '_detailed_results.tsv' ] ) : job . fileStore . writeGlobalFile ( '' . join ( [ work_dir , '/' , mhc , '_merged_files_detailed_results.tsv' ] ) ) } export_results ( work_dir , univ_options ) return output_files
2,837
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1676-L1712
[ "def", "reverse_char", "(", "self", ",", "hints", ")", ":", "if", "isinstance", "(", "hints", ",", "string_types", ")", ":", "hints", "=", "[", "hints", "]", "Unihan", "=", "self", ".", "sql", ".", "base", ".", "classes", ".", "Unihan", "columns", "=", "Unihan", ".", "__table__", ".", "columns", "return", "self", ".", "sql", ".", "session", ".", "query", "(", "Unihan", ")", ".", "filter", "(", "or_", "(", "*", "[", "column", ".", "contains", "(", "hint", ")", "for", "column", "in", "columns", "for", "hint", "in", "hints", "]", ")", ")" ]
This is adapted from John Vivian s return_input_paths from the RNA - Seq pipeline .
def get_files_from_filestore ( job , files , work_dir , cache = True , docker = False ) : for name in files . keys ( ) : outfile = job . fileStore . readGlobalFile ( files [ name ] , '/' . join ( [ work_dir , name ] ) , cache = cache ) # If the file pointed to a tarball, extract it to WORK_DIR if tarfile . is_tarfile ( outfile ) and file_xext ( outfile ) . startswith ( '.tar' ) : untar_name = os . path . basename ( strip_xext ( outfile ) ) files [ untar_name ] = untargz ( outfile , work_dir ) files . pop ( name ) name = os . path . basename ( untar_name ) # If the file is gzipped but NOT a tarfile, gunzip it to work_dir. However, the file is # already named x.gz so we need to write to a temporary file x.gz_temp then do a move # operation to overwrite x.gz. elif is_gzipfile ( outfile ) and file_xext ( outfile ) == '.gz' : ungz_name = strip_xext ( outfile ) with gzip . open ( outfile , 'rb' ) as gz_in , open ( ungz_name , 'w' ) as ungz_out : shutil . copyfileobj ( gz_in , ungz_out ) files [ os . path . basename ( ungz_name ) ] = outfile files . pop ( name ) name = os . path . basename ( ungz_name ) else : files [ name ] = outfile # If the files will be sent to docker, we will mount work_dir to the container as /data and # we want the /data prefixed path to the file if docker : files [ name ] = docker_path ( files [ name ] ) return files
2,838
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1866-L1904
[ "def", "exclude_types", "(", "self", ",", "*", "objs", ")", ":", "for", "o", "in", "objs", ":", "for", "t", "in", "_keytuple", "(", "o", ")", ":", "if", "t", "and", "t", "not", "in", "self", ".", "_excl_d", ":", "self", ".", "_excl_d", "[", "t", "]", "=", "0" ]
This module will accept the vcf files for mutect and radia read into memory in a dict object VCF_FILE and will merge the calls . Merged calls are printed to MERGED_MUT_FILE .
def merge_vcfs ( vcf_file , merged_mut_file ) : mutect_keys = set ( vcf_file [ 'mutect' ] . keys ( ) ) radia_keys = set ( vcf_file [ 'radia' ] . keys ( ) ) common_keys = radia_keys . intersection ( mutect_keys ) # Open as append since the header is already written with open ( merged_mut_file , 'a' ) as outfile : for mutation in common_keys : print ( '\t' . join ( vcf_file [ 'radia' ] [ mutation ] ) , file = outfile ) return None
2,839
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L1956-L1974
[ "def", "get_local_hypervisor", "(", "self", ")", ":", "# Look up hypervisors available filtered by my hostname", "host", "=", "self", ".", "get_my_hostname", "(", ")", "hyp", "=", "self", ".", "get_all_hypervisor_ids", "(", "filter_by_host", "=", "host", ")", "if", "hyp", ":", "return", "hyp", "[", "0", "]" ]
Makes subprocess call of a command to a docker container . work_dir MUST BE AN ABSOLUTE PATH or the call will fail . outfile is an open file descriptor to a writeable file .
def docker_call ( tool , tool_parameters , work_dir , java_opts = None , outfile = None , dockerhub = 'aarjunrao' , interactive = False ) : # If an outifle has been provided, then ensure that it is of type file, it is writeable, and # that it is open. if outfile : assert isinstance ( outfile , file ) , 'outfile was not passsed a file' assert outfile . mode in [ 'w' , 'a' , 'wb' , 'ab' ] , 'outfile not writeable' assert not outfile . closed , 'outfile is closed' # If the call is interactive, set intereactive to -i if interactive : interactive = '-i' else : interactive = '' # If a tag is passed along with the image, use it. if ':' in tool : docker_tool = '/' . join ( [ dockerhub , tool ] ) # Else use 'latest' else : docker_tool = '' . join ( [ dockerhub , '/' , tool , ':latest' ] ) # Get the docker image on the worker if needed call = [ 'docker' , 'images' ] dimg_rv = subprocess . check_output ( call ) existing_images = [ ':' . join ( x . split ( ) [ 0 : 2 ] ) for x in dimg_rv . splitlines ( ) if x . startswith ( dockerhub ) ] if docker_tool not in existing_images : try : call = ' ' . join ( [ 'docker' , 'pull' , docker_tool ] ) . split ( ) subprocess . check_call ( call ) except subprocess . CalledProcessError as err : raise RuntimeError ( 'docker command returned a non-zero exit status ' + '(%s)' % err . returncode + 'for command \"%s\"' % ' ' . join ( call ) , ) except OSError : raise RuntimeError ( 'docker not found on system. Install on all' + ' nodes.' ) # If java options have been provided, it needs to be in the docker call if java_opts : base_docker_call = ' docker run -e JAVA_OPTS=-Xmx{} ' . format ( java_opts ) + '--rm=true ' + '-v {}:/data --log-driver=none ' . format ( work_dir ) + interactive else : base_docker_call = ' docker run --rm=true -v {}:/data ' . format ( work_dir ) + '--log-driver=none ' + interactive call = base_docker_call . split ( ) + [ docker_tool ] + tool_parameters try : subprocess . 
check_call ( call , stdout = outfile ) except subprocess . CalledProcessError as err : raise RuntimeError ( 'docker command returned a non-zero exit status (%s)' % err . returncode + 'for command \"%s\"' % ' ' . join ( call ) , ) except OSError : raise RuntimeError ( 'docker not found on system. Install on all nodes.' )
2,840
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L2130-L2182
[ "def", "_headers", "(", "self", ",", "headers_dict", ")", ":", "return", "Headers", "(", "dict", "(", "(", "k", ",", "[", "v", "]", ")", "for", "(", "k", ",", "v", ")", "in", "headers_dict", ".", "items", "(", ")", ")", ")" ]
This module accepts a tar . gz archive and untars it .
def untargz ( input_targz_file , untar_to_dir ) : assert tarfile . is_tarfile ( input_targz_file ) , 'Not a tar file.' tarball = tarfile . open ( input_targz_file ) return_value = os . path . join ( untar_to_dir , tarball . getmembers ( ) [ 0 ] . name ) tarball . extractall ( path = untar_to_dir ) tarball . close ( ) return return_value
2,841
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L2185-L2199
[ "def", "_session_start", "(", "self", ")", ":", "return", "self", ".", "_session_class", "(", "self", ".", "_session_id", ",", "self", ".", "_session_duration", ",", "self", ".", "_session_settings", ")" ]
split an input bam to paired fastqs .
def bam2fastq ( job , bamfile , univ_options ) : work_dir = os . path . split ( bamfile ) [ 0 ] base_name = os . path . split ( os . path . splitext ( bamfile ) [ 0 ] ) [ 1 ] parameters = [ 'SamToFastq' , '' . join ( [ 'I=' , docker_path ( bamfile ) ] ) , '' . join ( [ 'F=/data/' , base_name , '_1.fastq' ] ) , '' . join ( [ 'F2=/data/' , base_name , '_2.fastq' ] ) , '' . join ( [ 'FU=/data/' , base_name , '_UP.fastq' ] ) ] docker_call ( tool = 'picard' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , java_opts = univ_options [ 'java_Xmx' ] ) first_fastq = '' . join ( [ work_dir , '/' , base_name , '_1.fastq' ] ) assert os . path . exists ( first_fastq ) return first_fastq
2,842
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L2386-L2408
[ "def", "node_cmd", "(", "cmd_name", ",", "node_dict", ")", ":", "sc", "=", "{", "\"run\"", ":", "cmd_startstop", ",", "\"stop\"", ":", "cmd_startstop", ",", "\"connect\"", ":", "cmd_connect", ",", "\"details\"", ":", "cmd_details", "}", "node_num", "=", "node_selection", "(", "cmd_name", ",", "len", "(", "node_dict", ")", ")", "refresh_main", "=", "None", "if", "node_num", "!=", "0", ":", "(", "node_valid", ",", "node_info", ")", "=", "node_validate", "(", "node_dict", ",", "node_num", ",", "cmd_name", ")", "if", "node_valid", ":", "sub_cmd", "=", "sc", "[", "cmd_name", "]", "# get sub-command", "refresh_main", "=", "sub_cmd", "(", "node_dict", "[", "node_num", "]", ",", "cmd_name", ",", "node_info", ")", "else", ":", "# invalid target", "ui_print_suffix", "(", "node_info", ",", "C_ERR", ")", "sleep", "(", "1.5", ")", "else", ":", "# '0' entered - exit command but not program", "ui_print", "(", "\" - Exit Command\"", ")", "sleep", "(", "0.5", ")", "return", "refresh_main" ]
This is the main function for the UCSC Precision Immuno pipeline .
def main ( ) : parser = argparse . ArgumentParser ( ) parser . add_argument ( '--config_file' , dest = 'config_file' , help = 'Config file to be used in the' + 'run.' , type = str , required = True , default = None ) Job . Runner . addToilOptions ( parser ) params = parser . parse_args ( ) START = Job . wrapJobFn ( parse_config_file , params . config_file ) . encapsulate ( ) Job . Runner . startToil ( START , params ) return None
2,843
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/ProTECT.py#L2530-L2541
[ "def", "stop_drivers", "(", "cls", ",", "maintain_default", "=", "False", ")", ":", "# Exclude first wrapper if the driver must be reused", "driver_wrappers", "=", "cls", ".", "driver_wrappers", "[", "1", ":", "]", "if", "maintain_default", "else", "cls", ".", "driver_wrappers", "for", "driver_wrapper", "in", "driver_wrappers", ":", "if", "not", "driver_wrapper", ".", "driver", ":", "continue", "try", ":", "driver_wrapper", ".", "driver", ".", "quit", "(", ")", "except", "Exception", "as", "e", ":", "driver_wrapper", ".", "logger", ".", "warn", "(", "\"Capture exceptions to avoid errors in teardown method due to session timeouts: \\n %s\"", "%", "e", ")" ]
A wrapper for the the entire strelka sub - graph .
def run_strelka_with_merge ( job , tumor_bam , normal_bam , univ_options , strelka_options ) : spawn = job . wrapJobFn ( run_strelka , tumor_bam , normal_bam , univ_options , strelka_options , split = False ) . encapsulate ( ) job . addChild ( spawn ) return spawn . rv ( )
2,844
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/strelka.py#L39-L53
[ "def", "write", "(", "self", ",", "symbol", ",", "data", ")", ":", "# get the full set of date ranges that we have", "cursor", "=", "self", ".", "_collection", ".", "find", "(", ")", "for", "res", "in", "cursor", ":", "library", "=", "self", ".", "_arctic_lib", ".", "arctic", "[", "res", "[", "'library_name'", "]", "]", "dslice", "=", "self", ".", "_slice", "(", "data", ",", "to_dt", "(", "res", "[", "'start'", "]", ",", "mktz", "(", "'UTC'", ")", ")", ",", "to_dt", "(", "res", "[", "'end'", "]", ",", "mktz", "(", "'UTC'", ")", ")", ")", "if", "len", "(", "dslice", ")", "!=", "0", ":", "library", ".", "write", "(", "symbol", ",", "dslice", ")" ]
Run the strelka subgraph on the DNA bams . Optionally split the results into per - chromosome vcfs .
def run_strelka ( job , tumor_bam , normal_bam , univ_options , strelka_options , split = True ) : if strelka_options [ 'chromosomes' ] : chromosomes = strelka_options [ 'chromosomes' ] else : chromosomes = sample_chromosomes ( job , strelka_options [ 'genome_fai' ] ) num_cores = min ( len ( chromosomes ) , univ_options [ 'max_cores' ] ) strelka = job . wrapJobFn ( run_strelka_full , tumor_bam , normal_bam , univ_options , strelka_options , disk = PromisedRequirement ( strelka_disk , tumor_bam [ 'tumor_dna_fix_pg_sorted.bam' ] , normal_bam [ 'normal_dna_fix_pg_sorted.bam' ] , strelka_options [ 'genome_fasta' ] ) , memory = '6G' , cores = num_cores ) job . addChild ( strelka ) if split : unmerge_strelka = job . wrapJobFn ( wrap_unmerge , strelka . rv ( ) , chromosomes , strelka_options , univ_options ) . encapsulate ( ) strelka . addChild ( unmerge_strelka ) return unmerge_strelka . rv ( ) else : return strelka . rv ( )
2,845
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/strelka.py#L56-L102
[ "def", "_GetDelayImportTimestamps", "(", "self", ",", "pefile_object", ")", ":", "delay_import_timestamps", "=", "[", "]", "if", "not", "hasattr", "(", "pefile_object", ",", "'DIRECTORY_ENTRY_DELAY_IMPORT'", ")", ":", "return", "delay_import_timestamps", "for", "importdata", "in", "pefile_object", ".", "DIRECTORY_ENTRY_DELAY_IMPORT", ":", "dll_name", "=", "importdata", ".", "dll", "try", ":", "dll_name", "=", "dll_name", ".", "decode", "(", "'ascii'", ")", "except", "UnicodeDecodeError", ":", "dll_name", "=", "dll_name", ".", "decode", "(", "'ascii'", ",", "errors", "=", "'replace'", ")", "timestamp", "=", "getattr", "(", "importdata", ".", "struct", ",", "'dwTimeStamp'", ",", "0", ")", "delay_import_timestamps", ".", "append", "(", "[", "dll_name", ",", "timestamp", "]", ")", "return", "delay_import_timestamps" ]
Run strelka on the DNA bams .
def run_strelka_full ( job , tumor_bam , normal_bam , univ_options , strelka_options ) : work_dir = os . getcwd ( ) input_files = { 'tumor.bam' : tumor_bam [ 'tumor_dna_fix_pg_sorted.bam' ] , 'tumor.bam.bai' : tumor_bam [ 'tumor_dna_fix_pg_sorted.bam.bai' ] , 'normal.bam' : normal_bam [ 'normal_dna_fix_pg_sorted.bam' ] , 'normal.bam.bai' : normal_bam [ 'normal_dna_fix_pg_sorted.bam.bai' ] , 'genome.fa.tar.gz' : strelka_options [ 'genome_fasta' ] , 'genome.fa.fai.tar.gz' : strelka_options [ 'genome_fai' ] , 'config.ini.tar.gz' : strelka_options [ 'config_file' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) for key in ( 'genome.fa' , 'genome.fa.fai' , 'config.ini' ) : input_files [ key ] = untargz ( input_files [ key + '.tar.gz' ] , work_dir ) input_files = { key : docker_path ( path ) for key , path in input_files . items ( ) } parameters = [ input_files [ 'config.ini' ] , input_files [ 'tumor.bam' ] , input_files [ 'normal.bam' ] , input_files [ 'genome.fa' ] , str ( job . cores ) ] docker_call ( tool = 'strelka' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , tool_version = strelka_options [ 'version' ] ) output_dict = { } for mutation_type in [ 'snvs' , 'indels' ] : output_dict [ mutation_type ] = job . fileStore . writeGlobalFile ( os . path . join ( work_dir , 'strelka_out' , 'results' , 'passed.somatic.' + mutation_type + '.vcf' ) ) job . fileStore . logToMaster ( 'Ran strelka on %s successfully' % univ_options [ 'patient' ] ) return output_dict
2,846
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/strelka.py#L105-L148
[ "def", "convert_to_experiment_list", "(", "experiments", ")", ":", "exp_list", "=", "experiments", "# Transform list if necessary", "if", "experiments", "is", "None", ":", "exp_list", "=", "[", "]", "elif", "isinstance", "(", "experiments", ",", "Experiment", ")", ":", "exp_list", "=", "[", "experiments", "]", "elif", "type", "(", "experiments", ")", "is", "dict", ":", "exp_list", "=", "[", "Experiment", ".", "from_json", "(", "name", ",", "spec", ")", "for", "name", ",", "spec", "in", "experiments", ".", "items", "(", ")", "]", "# Validate exp_list", "if", "(", "type", "(", "exp_list", ")", "is", "list", "and", "all", "(", "isinstance", "(", "exp", ",", "Experiment", ")", "for", "exp", "in", "exp_list", ")", ")", ":", "if", "len", "(", "exp_list", ")", ">", "1", ":", "logger", ".", "warning", "(", "\"All experiments will be \"", "\"using the same SearchAlgorithm.\"", ")", "else", ":", "raise", "TuneError", "(", "\"Invalid argument: {}\"", ".", "format", "(", "experiments", ")", ")", "return", "exp_list" ]
A wwrapper to unmerge the strelka snvs and indels
def wrap_unmerge ( job , strelka_out , chromosomes , strelka_options , univ_options ) : return { 'snvs' : job . addChildJobFn ( unmerge , strelka_out [ 'snvs' ] , 'strelka/snv' , chromosomes , strelka_options , univ_options ) . rv ( ) , 'indels' : job . addChildJobFn ( unmerge , strelka_out [ 'indels' ] , 'strelka/indel' , chromosomes , strelka_options , univ_options ) . rv ( ) }
2,847
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/strelka.py#L165-L190
[ "def", "construct_error_message", "(", "driver_id", ",", "error_type", ",", "message", ",", "timestamp", ")", ":", "builder", "=", "flatbuffers", ".", "Builder", "(", "0", ")", "driver_offset", "=", "builder", ".", "CreateString", "(", "driver_id", ".", "binary", "(", ")", ")", "error_type_offset", "=", "builder", ".", "CreateString", "(", "error_type", ")", "message_offset", "=", "builder", ".", "CreateString", "(", "message", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataStart", "(", "builder", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddDriverId", "(", "builder", ",", "driver_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddType", "(", "builder", ",", "error_type_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddErrorMessage", "(", "builder", ",", "message_offset", ")", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataAddTimestamp", "(", "builder", ",", "timestamp", ")", "error_data_offset", "=", "ray", ".", "core", ".", "generated", ".", "ErrorTableData", ".", "ErrorTableDataEnd", "(", "builder", ")", "builder", ".", "Finish", "(", "error_data_offset", ")", "return", "bytes", "(", "builder", ".", "Output", "(", ")", ")" ]
Get the ISO time string from a timestamp or date obj . Returns current time str if no timestamp is passed
def get_iso_time_str ( timestamp : Union [ int , float , str , datetime ] = None ) -> str : if isinstance ( timestamp , ( int , float ) ) : maya_dt = maya . MayaDT ( timestamp ) elif isinstance ( timestamp , str ) : maya_dt = maya . when ( timestamp ) elif timestamp is None : maya_dt = maya . now ( ) else : raise ValueError ( f'`{type(timestamp)}` is not supported' ) return maya_dt . iso8601 ( )
2,848
https://github.com/budacom/trading-bots/blob/8cb68bb8d0b5f822108db1cc5dae336e3d3c3452/trading_bots/utils.py#L27-L37
[ "def", "delete_advanced_configs", "(", "vm_name", ",", "datacenter", ",", "advanced_configs", ",", "service_instance", "=", "None", ")", ":", "datacenter_ref", "=", "salt", ".", "utils", ".", "vmware", ".", "get_datacenter", "(", "service_instance", ",", "datacenter", ")", "vm_ref", "=", "salt", ".", "utils", ".", "vmware", ".", "get_mor_by_property", "(", "service_instance", ",", "vim", ".", "VirtualMachine", ",", "vm_name", ",", "property_name", "=", "'name'", ",", "container_ref", "=", "datacenter_ref", ")", "config_spec", "=", "vim", ".", "vm", ".", "ConfigSpec", "(", ")", "removed_configs", "=", "_delete_advanced_config", "(", "config_spec", ",", "advanced_configs", ",", "vm_ref", ".", "config", ".", "extraConfig", ")", "if", "removed_configs", ":", "salt", ".", "utils", ".", "vmware", ".", "update_vm", "(", "vm_ref", ",", "config_spec", ")", "return", "{", "'removed_configs'", ":", "removed_configs", "}" ]
Truncates a value to a number of decimals places
def truncate ( value : Decimal , n_digits : int ) -> Decimal : return Decimal ( math . trunc ( value * ( 10 ** n_digits ) ) ) / ( 10 ** n_digits )
2,849
https://github.com/budacom/trading-bots/blob/8cb68bb8d0b5f822108db1cc5dae336e3d3c3452/trading_bots/utils.py#L40-L42
[ "def", "get_event_stream", "(", "self", ")", ":", "if", "self", ".", "_event_stream", "is", "None", ":", "self", ".", "_event_stream", "=", "WVAEventStream", "(", "self", ".", "_http_client", ")", "return", "self", ".", "_event_stream" ]
Truncates a value to the number of decimals corresponding to the currency
def truncate_to ( value : Decimal , currency : str ) -> Decimal : decimal_places = DECIMALS . get ( currency . upper ( ) , 2 ) return truncate ( value , decimal_places )
2,850
https://github.com/budacom/trading-bots/blob/8cb68bb8d0b5f822108db1cc5dae336e3d3c3452/trading_bots/utils.py#L45-L48
[ "def", "if_range", "(", "self", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "return", "self", ".", "_http_date", "(", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "IF_RANGE", ")", ")" ]
Truncates money amount to the number of decimals corresponding to the currency
def truncate_money ( money : Money ) -> Money : amount = truncate_to ( money . amount , money . currency ) return Money ( amount , money . currency )
2,851
https://github.com/budacom/trading-bots/blob/8cb68bb8d0b5f822108db1cc5dae336e3d3c3452/trading_bots/utils.py#L51-L54
[ "def", "ProcessLegacyClient", "(", "self", ",", "ping", ",", "client", ")", ":", "labels", "=", "self", ".", "_GetClientLabelsList", "(", "client", ")", "system", "=", "client", ".", "Get", "(", "client", ".", "Schema", ".", "SYSTEM", ",", "\"Unknown\"", ")", "uname", "=", "client", ".", "Get", "(", "client", ".", "Schema", ".", "UNAME", ",", "\"Unknown\"", ")", "self", ".", "_Process", "(", "labels", ",", "ping", ",", "system", ",", "uname", ")" ]
Returns a lower and upper value separated by a spread percentage
def spread_value ( value : Decimal , spread_p : Decimal ) -> Tuple [ Decimal , Decimal ] : upper = value * ( 1 + spread_p ) lower = value / ( 1 + spread_p ) return lower , upper
2,852
https://github.com/budacom/trading-bots/blob/8cb68bb8d0b5f822108db1cc5dae336e3d3c3452/trading_bots/utils.py#L57-L61
[ "def", "getCursor", "(", "self", ")", ":", "if", "self", ".", "connection", "is", "None", ":", "self", ".", "Connect", "(", ")", "return", "self", ".", "connection", ".", "cursor", "(", "MySQLdb", ".", "cursors", ".", "DictCursor", ")" ]
Returns a lower and upper money amount separated by a spread percentage
def spread_money ( money : Money , spread_p : Decimal ) -> Tuple [ Money , Money ] : upper , lower = spread_value ( money . amount , spread_p ) return Money ( upper , money . currency ) , Money ( lower , money . currency )
2,853
https://github.com/budacom/trading-bots/blob/8cb68bb8d0b5f822108db1cc5dae336e3d3c3452/trading_bots/utils.py#L64-L67
[ "def", "configure", "(", "cls", ",", "impl", ":", "\"Union[None, str, Type[Configurable]]\"", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "super", "(", "AsyncHTTPClient", ",", "cls", ")", ".", "configure", "(", "impl", ",", "*", "*", "kwargs", ")" ]
Checks if the english date is in valid range for conversion
def check_valid_ad_range ( date ) : if date < values . START_EN_DATE or date > values . END_EN_DATE : raise ValueError ( "Date out of range" ) return True
2,854
https://github.com/nepalicalendar/nepalicalendar-py/blob/a589c28b8e085049f30a7287753476b59eca6f50/nepalicalendar/functions.py#L8-L14
[ "def", "pop", "(", "self", ",", "key", ",", "default", "=", "UndefinedKey", ")", ":", "if", "default", "!=", "UndefinedKey", "and", "key", "not", "in", "self", ":", "return", "default", "value", "=", "self", ".", "get", "(", "key", ",", "UndefinedKey", ")", "lst", "=", "ConfigTree", ".", "parse_key", "(", "key", ")", "parent", "=", "self", ".", "KEY_SEP", ".", "join", "(", "lst", "[", "0", ":", "-", "1", "]", ")", "child", "=", "lst", "[", "-", "1", "]", "if", "parent", ":", "self", ".", "get", "(", "parent", ")", ".", "__delitem__", "(", "child", ")", "else", ":", "self", ".", "__delitem__", "(", "child", ")", "return", "value" ]
Checks if the nepali date is in valid range for conversion
def check_valid_bs_range ( date ) : ERR_MSG = "%s out of range" % str ( date ) if date . year < values . START_NP_YEAR or date . year > values . END_NP_YEAR : raise ValueError ( ERR_MSG ) if date . month < 1 or date . month > 12 : raise ValueError ( ERR_MSG ) if date . day < 1 or date . day > values . NEPALI_MONTH_DAY_DATA [ date . year ] [ date . month - 1 ] : raise ValueError ( ERR_MSG ) return True
2,855
https://github.com/nepalicalendar/nepalicalendar-py/blob/a589c28b8e085049f30a7287753476b59eca6f50/nepalicalendar/functions.py#L17-L29
[ "def", "hierarchical_group_items", "(", "item_list", ",", "groupids_list", ")", ":", "# Construct a defaultdict type with the appropriate number of levels", "num_groups", "=", "len", "(", "groupids_list", ")", "leaf_type", "=", "partial", "(", "defaultdict", ",", "list", ")", "if", "num_groups", ">", "1", ":", "node_type", "=", "leaf_type", "for", "_", "in", "range", "(", "len", "(", "groupids_list", ")", "-", "2", ")", ":", "node_type", "=", "partial", "(", "defaultdict", ",", "node_type", ")", "root_type", "=", "node_type", "elif", "num_groups", "==", "1", ":", "root_type", "=", "list", "else", ":", "raise", "ValueError", "(", "'must suply groupids'", ")", "tree", "=", "defaultdict", "(", "root_type", ")", "#", "groupid_tuple_list", "=", "list", "(", "zip", "(", "*", "groupids_list", ")", ")", "for", "groupid_tuple", ",", "item", "in", "zip", "(", "groupid_tuple_list", ",", "item_list", ")", ":", "node", "=", "tree", "for", "groupid", "in", "groupid_tuple", ":", "node", "=", "node", "[", "groupid", "]", "node", ".", "append", "(", "item", ")", "return", "tree" ]
Convert a number to nepali
def nepali_number ( number ) : nepnum = "" for n in str ( number ) : nepnum += values . NEPDIGITS [ int ( n ) ] return nepnum
2,856
https://github.com/nepalicalendar/nepalicalendar-py/blob/a589c28b8e085049f30a7287753476b59eca6f50/nepalicalendar/functions.py#L31-L38
[ "def", "get_datastore_files", "(", "service_instance", ",", "directory", ",", "datastores", ",", "container_object", ",", "browser_spec", ")", ":", "files", "=", "[", "]", "datastore_objects", "=", "get_datastores", "(", "service_instance", ",", "container_object", ",", "datastore_names", "=", "datastores", ")", "for", "datobj", "in", "datastore_objects", ":", "try", ":", "task", "=", "datobj", ".", "browser", ".", "SearchDatastore_Task", "(", "datastorePath", "=", "'[{}] {}'", ".", "format", "(", "datobj", ".", "name", ",", "directory", ")", ",", "searchSpec", "=", "browser_spec", ")", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "salt", ".", "exceptions", ".", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "try", ":", "files", ".", "append", "(", "salt", ".", "utils", ".", "vmware", ".", "wait_for_task", "(", "task", ",", "directory", ",", "'query virtual machine files'", ")", ")", "except", "salt", ".", "exceptions", ".", "VMwareFileNotFoundError", ":", "pass", "return", "files" ]
Returns a fields dict for this serializer with a geometry field added .
def get_fields ( self ) : fields = super ( GeoModelSerializer , self ) . get_fields ( ) # Set the geometry field name when it's undeclared. if not self . Meta . geom_field : for name , field in fields . items ( ) : if isinstance ( field , GeometryField ) : self . Meta . geom_field = name break return fields
2,857
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/serializers.py#L31-L42
[ "def", "get_correlation_table", "(", "self", ",", "chain", "=", "0", ",", "parameters", "=", "None", ",", "caption", "=", "\"Parameter Correlations\"", ",", "label", "=", "\"tab:parameter_correlations\"", ")", ":", "parameters", ",", "cor", "=", "self", ".", "get_correlations", "(", "chain", "=", "chain", ",", "parameters", "=", "parameters", ")", "return", "self", ".", "_get_2d_latex_table", "(", "parameters", ",", "cor", ",", "caption", ",", "label", ")" ]
A wrapper for the the entire MuSE sub - graph .
def run_muse_with_merge ( job , tumor_bam , normal_bam , univ_options , muse_options ) : spawn = job . wrapJobFn ( run_muse , tumor_bam , normal_bam , univ_options , muse_options , disk = '100M' ) . encapsulate ( ) merge = job . wrapJobFn ( merge_perchrom_vcfs , spawn . rv ( ) , disk = '100M' ) job . addChild ( spawn ) spawn . addChild ( merge ) return merge . rv ( )
2,858
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/muse.py#L44-L60
[ "def", "_sync_with_file", "(", "self", ")", ":", "self", ".", "_records", "=", "[", "]", "i", "=", "-", "1", "for", "i", ",", "line", "in", "self", ".", "_enum_lines", "(", ")", ":", "self", ".", "_records", ".", "append", "(", "None", ")", "self", ".", "_last_synced_index", "=", "i" ]
Spawn a MuSE job for each chromosome on the DNA bams .
def run_muse ( job , tumor_bam , normal_bam , univ_options , muse_options ) : # Get a list of chromosomes to handle if muse_options [ 'chromosomes' ] : chromosomes = muse_options [ 'chromosomes' ] else : chromosomes = sample_chromosomes ( job , muse_options [ 'genome_fai' ] ) perchrom_muse = defaultdict ( ) for chrom in chromosomes : call = job . addChildJobFn ( run_muse_perchrom , tumor_bam , normal_bam , univ_options , muse_options , chrom , disk = PromisedRequirement ( muse_disk , tumor_bam [ 'tumor_dna_fix_pg_sorted.bam' ] , normal_bam [ 'normal_dna_fix_pg_sorted.bam' ] , muse_options [ 'genome_fasta' ] ) , memory = '6G' ) sump = call . addChildJobFn ( run_muse_sump_perchrom , call . rv ( ) , univ_options , muse_options , chrom , disk = PromisedRequirement ( muse_sump_disk , muse_options [ 'dbsnp_vcf' ] ) , memory = '6G' ) perchrom_muse [ chrom ] = sump . rv ( ) return perchrom_muse
2,859
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/muse.py#L63-L101
[ "def", "_write_header", "(", "self", ",", "epoch_data", ":", "EpochData", ")", "->", "None", ":", "self", ".", "_variables", "=", "self", ".", "_variables", "or", "list", "(", "next", "(", "iter", "(", "epoch_data", ".", "values", "(", ")", ")", ")", ".", "keys", "(", ")", ")", "self", ".", "_streams", "=", "epoch_data", ".", "keys", "(", ")", "header", "=", "[", "'\"epoch_id\"'", "]", "for", "stream_name", "in", "self", ".", "_streams", ":", "header", "+=", "[", "stream_name", "+", "'_'", "+", "var", "for", "var", "in", "self", ".", "_variables", "]", "with", "open", "(", "self", ".", "_file_path", ",", "'a'", ")", "as", "file", ":", "file", ".", "write", "(", "self", ".", "_delimiter", ".", "join", "(", "header", ")", "+", "'\\n'", ")", "self", ".", "_header_written", "=", "True" ]
Run MuSE call on a single chromosome in the input bams .
def run_muse_perchrom ( job , tumor_bam , normal_bam , univ_options , muse_options , chrom ) : work_dir = os . getcwd ( ) input_files = { 'tumor.bam' : tumor_bam [ 'tumor_dna_fix_pg_sorted.bam' ] , 'tumor.bam.bai' : tumor_bam [ 'tumor_dna_fix_pg_sorted.bam.bai' ] , 'normal.bam' : normal_bam [ 'normal_dna_fix_pg_sorted.bam' ] , 'normal.bam.bai' : normal_bam [ 'normal_dna_fix_pg_sorted.bam.bai' ] , 'genome.fa.tar.gz' : muse_options [ 'genome_fasta' ] , 'genome.fa.fai.tar.gz' : muse_options [ 'genome_fai' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) for key in ( 'genome.fa' , 'genome.fa.fai' ) : input_files [ key ] = untargz ( input_files [ key + '.tar.gz' ] , work_dir ) input_files = { key : docker_path ( path ) for key , path in input_files . items ( ) } output_prefix = os . path . join ( work_dir , chrom ) parameters = [ 'call' , '-f' , input_files [ 'genome.fa' ] , '-r' , chrom , '-O' , docker_path ( output_prefix ) , input_files [ 'tumor.bam' ] , input_files [ 'normal.bam' ] ] docker_call ( tool = 'muse' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , tool_version = muse_options [ 'version' ] ) outfile = job . fileStore . writeGlobalFile ( '' . join ( [ output_prefix , '.MuSE.txt' ] ) ) job . fileStore . logToMaster ( 'Ran MuSE on %s:%s successfully' % ( univ_options [ 'patient' ] , chrom ) ) return outfile
2,860
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/muse.py#L104-L142
[ "def", "compare", "(", "self", ",", "other", ",", "components", "=", "[", "]", ")", ":", "lg", ".", "debug", "(", "\"Comparing publish %s (%s) and %s (%s)\"", "%", "(", "self", ".", "name", ",", "self", ".", "storage", "or", "\"local\"", ",", "other", ".", "name", ",", "other", ".", "storage", "or", "\"local\"", ")", ")", "diff", ",", "equal", "=", "(", "{", "}", ",", "{", "}", ")", "for", "component", ",", "snapshots", "in", "self", ".", "components", ".", "items", "(", ")", ":", "if", "component", "not", "in", "list", "(", "other", ".", "components", ".", "keys", "(", ")", ")", ":", "# Component is missing in other", "diff", "[", "component", "]", "=", "snapshots", "continue", "equal_snapshots", "=", "list", "(", "set", "(", "snapshots", ")", ".", "intersection", "(", "other", ".", "components", "[", "component", "]", ")", ")", "if", "equal_snapshots", ":", "lg", ".", "debug", "(", "\"Equal snapshots for %s: %s\"", "%", "(", "component", ",", "equal_snapshots", ")", ")", "equal", "[", "component", "]", "=", "equal_snapshots", "diff_snapshots", "=", "list", "(", "set", "(", "snapshots", ")", ".", "difference", "(", "other", ".", "components", "[", "component", "]", ")", ")", "if", "diff_snapshots", ":", "lg", ".", "debug", "(", "\"Different snapshots for %s: %s\"", "%", "(", "component", ",", "diff_snapshots", ")", ")", "diff", "[", "component", "]", "=", "diff_snapshots", "return", "(", "diff", ",", "equal", ")" ]
Run MuSE sump on the MuSE call generated vcf .
def run_muse_sump_perchrom ( job , muse_output , univ_options , muse_options , chrom ) : work_dir = os . getcwd ( ) input_files = { 'MuSE.txt' : muse_output , 'dbsnp_coding.vcf.gz' : muse_options [ 'dbsnp_vcf' ] , 'dbsnp_coding.vcf.gz.tbi.tmp' : muse_options [ 'dbsnp_tbi' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) tbi = os . path . splitext ( input_files [ 'dbsnp_coding.vcf.gz.tbi.tmp' ] ) [ 0 ] time . sleep ( 2 ) shutil . copy ( input_files [ 'dbsnp_coding.vcf.gz.tbi.tmp' ] , tbi ) os . chmod ( tbi , 0777 ) open ( tbi , 'a' ) . close ( ) input_files = { key : docker_path ( path ) for key , path in input_files . items ( ) } output_file = '' . join ( [ work_dir , '/' , chrom , '.vcf' ] ) parameters = [ 'sump' , '-I' , input_files [ 'MuSE.txt' ] , '-O' , docker_path ( output_file ) , '-D' , input_files [ 'dbsnp_coding.vcf.gz' ] , '-E' ] docker_call ( tool = 'muse' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , tool_version = muse_options [ 'version' ] ) outfile = job . fileStore . writeGlobalFile ( output_file ) export_results ( job , outfile , output_file , univ_options , subfolder = 'mutations/muse' ) job . fileStore . logToMaster ( 'Ran MuSE sump on %s:%s successfully' % ( univ_options [ 'patient' ] , chrom ) ) return outfile
2,861
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/muse.py#L145-L182
[ "def", "broadcast", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "from", ".", "dataarray", "import", "DataArray", "from", ".", "dataset", "import", "Dataset", "exclude", "=", "kwargs", ".", "pop", "(", "'exclude'", ",", "None", ")", "if", "exclude", "is", "None", ":", "exclude", "=", "set", "(", ")", "if", "kwargs", ":", "raise", "TypeError", "(", "'broadcast() got unexpected keyword arguments: %s'", "%", "list", "(", "kwargs", ")", ")", "args", "=", "align", "(", "*", "args", ",", "join", "=", "'outer'", ",", "copy", "=", "False", ",", "exclude", "=", "exclude", ")", "common_coords", "=", "OrderedDict", "(", ")", "dims_map", "=", "OrderedDict", "(", ")", "for", "arg", "in", "args", ":", "for", "dim", "in", "arg", ".", "dims", ":", "if", "dim", "not", "in", "common_coords", "and", "dim", "not", "in", "exclude", ":", "dims_map", "[", "dim", "]", "=", "arg", ".", "sizes", "[", "dim", "]", "if", "dim", "in", "arg", ".", "coords", ":", "common_coords", "[", "dim", "]", "=", "arg", ".", "coords", "[", "dim", "]", ".", "variable", "def", "_set_dims", "(", "var", ")", ":", "# Add excluded dims to a copy of dims_map", "var_dims_map", "=", "dims_map", ".", "copy", "(", ")", "for", "dim", "in", "exclude", ":", "with", "suppress", "(", "ValueError", ")", ":", "# ignore dim not in var.dims", "var_dims_map", "[", "dim", "]", "=", "var", ".", "shape", "[", "var", ".", "dims", ".", "index", "(", "dim", ")", "]", "return", "var", ".", "set_dims", "(", "var_dims_map", ")", "def", "_broadcast_array", "(", "array", ")", ":", "data", "=", "_set_dims", "(", "array", ".", "variable", ")", "coords", "=", "OrderedDict", "(", "array", ".", "coords", ")", "coords", ".", "update", "(", "common_coords", ")", "return", "DataArray", "(", "data", ",", "coords", ",", "data", ".", "dims", ",", "name", "=", "array", ".", "name", ",", "attrs", "=", "array", ".", "attrs", ")", "def", "_broadcast_dataset", "(", "ds", ")", ":", "data_vars", "=", "OrderedDict", "(", "(", "k", ",", "_set_dims", 
"(", "ds", ".", "variables", "[", "k", "]", ")", ")", "for", "k", "in", "ds", ".", "data_vars", ")", "coords", "=", "OrderedDict", "(", "ds", ".", "coords", ")", "coords", ".", "update", "(", "common_coords", ")", "return", "Dataset", "(", "data_vars", ",", "coords", ",", "ds", ".", "attrs", ")", "result", "=", "[", "]", "for", "arg", "in", "args", ":", "if", "isinstance", "(", "arg", ",", "DataArray", ")", ":", "result", ".", "append", "(", "_broadcast_array", "(", "arg", ")", ")", "elif", "isinstance", "(", "arg", ",", "Dataset", ")", ":", "result", ".", "append", "(", "_broadcast_dataset", "(", "arg", ")", ")", "else", ":", "raise", "ValueError", "(", "'all input must be Dataset or DataArray objects'", ")", "return", "tuple", "(", "result", ")" ]
Returns an ndarray of linear breaks .
def linear ( self , limits = None , k = 5 ) : start , stop = limits or ( self . minval , self . maxval ) return np . linspace ( start , stop , k )
2,862
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/models.py#L81-L84
[ "def", "set_text", "(", "self", ",", "text", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "text", "is", "None", ":", "text", "=", "{", "}", "text", ".", "update", "(", "popdict", "(", "kwargs", ",", "_registered_kw", ")", ")", "if", "'Creation Time'", "in", "text", "and", "not", "isinstance", "(", "text", "[", "'Creation Time'", "]", ",", "(", "basestring", ",", "bytes", ")", ")", ":", "text", "[", "'Creation Time'", "]", "=", "datetime", ".", "datetime", "(", "*", "(", "check_time", "(", "text", "[", "'Creation Time'", "]", ")", "[", ":", "6", "]", ")", ")", ".", "isoformat", "(", ")", "self", ".", "text", "=", "text" ]
Returns an ndarray of quantile breaks .
def quantiles ( self , k = 5 ) : arr = self . array ( ) q = list ( np . linspace ( 0 , 100 , k ) ) return np . percentile ( arr . compressed ( ) , q )
2,863
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/models.py#L86-L90
[ "def", "_open_data_stream", "(", "self", ")", ":", "@", "asyncio", ".", "coroutine", "def", "connection_factory", "(", "address", ":", "Tuple", "[", "int", ",", "int", "]", ")", ":", "self", ".", "_data_connection", "=", "yield", "from", "self", ".", "_acquire_connection", "(", "address", "[", "0", "]", ",", "address", "[", "1", "]", ")", "return", "self", ".", "_data_connection", "self", ".", "_data_stream", "=", "yield", "from", "self", ".", "_commander", ".", "setup_data_stream", "(", "connection_factory", ")", "self", ".", "_response", ".", "data_address", "=", "self", ".", "_data_connection", ".", "address", "read_callback", "=", "functools", ".", "partial", "(", "self", ".", "event_dispatcher", ".", "notify", ",", "self", ".", "Event", ".", "transfer_receive_data", ")", "self", ".", "_data_stream", ".", "data_event_dispatcher", ".", "add_read_listener", "(", "read_callback", ")", "write_callback", "=", "functools", ".", "partial", "(", "self", ".", "event_dispatcher", ".", "notify", ",", "self", ".", "Event", ".", "transfer_send_data", ")", "self", ".", "_data_stream", ".", "data_event_dispatcher", ".", "add_write_listener", "(", "write_callback", ")" ]
Normalize data to a list of floats .
def to_python ( self , value ) : if not value : return [ ] return map ( super ( CommaSepFloatField , self ) . to_python , value . split ( ',' ) )
2,864
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/forms/fields.py#L27-L31
[ "def", "icao", "(", "msg", ")", ":", "DF", "=", "df", "(", "msg", ")", "if", "DF", "in", "(", "11", ",", "17", ",", "18", ")", ":", "addr", "=", "msg", "[", "2", ":", "8", "]", "elif", "DF", "in", "(", "0", ",", "4", ",", "5", ",", "16", ",", "20", ",", "21", ")", ":", "c0", "=", "bin2int", "(", "crc", "(", "msg", ",", "encode", "=", "True", ")", ")", "c1", "=", "hex2int", "(", "msg", "[", "-", "6", ":", "]", ")", "addr", "=", "'%06X'", "%", "(", "c0", "^", "c1", ")", "else", ":", "addr", "=", "None", "return", "addr" ]
Run validators for each item separately .
def run_validators ( self , values ) : for val in values : super ( CommaSepFloatField , self ) . run_validators ( val )
2,865
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/forms/fields.py#L33-L36
[ "def", "send_login_code", "(", "self", ",", "code", ",", "context", ",", "*", "*", "kwargs", ")", ":", "from_number", "=", "self", ".", "from_number", "or", "getattr", "(", "settings", ",", "'DEFAULT_FROM_NUMBER'", ")", "sms_content", "=", "render_to_string", "(", "self", ".", "template_name", ",", "context", ")", "self", ".", "twilio_client", ".", "messages", ".", "create", "(", "to", "=", "code", ".", "user", ".", "phone_number", ",", "from_", "=", "from_number", ",", "body", "=", "sms_content", ")" ]
Returns a GEOS Polygon from bounding box values .
def to_python ( self , value ) : value = super ( BoundingBoxField , self ) . to_python ( value ) try : bbox = gdal . OGRGeometry . from_bbox ( value ) . geos except ( ValueError , AttributeError ) : return [ ] bbox . srid = self . srid return bbox
2,866
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/forms/fields.py#L46-L54
[ "def", "base_mortality_rate", "(", "self", ",", "index", ":", "pd", ".", "Index", ")", "->", "pd", ".", "Series", ":", "return", "pd", ".", "Series", "(", "self", ".", "config", ".", "mortality_rate", ",", "index", "=", "index", ")" ]
A wrapper for the the entire MuTect sub - graph .
def run_mutect_with_merge ( job , tumor_bam , normal_bam , univ_options , mutect_options ) : spawn = job . wrapJobFn ( run_mutect , tumor_bam , normal_bam , univ_options , mutect_options ) . encapsulate ( ) merge = job . wrapJobFn ( merge_perchrom_vcfs , spawn . rv ( ) ) job . addChild ( spawn ) spawn . addChild ( merge ) return merge . rv ( )
2,867
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/mutect.py#L41-L57
[ "def", "_sync_with_file", "(", "self", ")", ":", "self", ".", "_records", "=", "[", "]", "i", "=", "-", "1", "for", "i", ",", "line", "in", "self", ".", "_enum_lines", "(", ")", ":", "self", ".", "_records", ".", "append", "(", "None", ")", "self", ".", "_last_synced_index", "=", "i" ]
Spawn a MuTect job for each chromosome on the DNA bams .
def run_mutect ( job , tumor_bam , normal_bam , univ_options , mutect_options ) : # Get a list of chromosomes to handle if mutect_options [ 'chromosomes' ] : chromosomes = mutect_options [ 'chromosomes' ] else : chromosomes = sample_chromosomes ( job , mutect_options [ 'genome_fai' ] ) perchrom_mutect = defaultdict ( ) for chrom in chromosomes : perchrom_mutect [ chrom ] = job . addChildJobFn ( run_mutect_perchrom , tumor_bam , normal_bam , univ_options , mutect_options , chrom , memory = '6G' , disk = PromisedRequirement ( mutect_disk , tumor_bam [ 'tumor_dna_fix_pg_sorted.bam' ] , normal_bam [ 'normal_dna_fix_pg_sorted.bam' ] , mutect_options [ 'genome_fasta' ] , mutect_options [ 'dbsnp_vcf' ] , mutect_options [ 'cosmic_vcf' ] ) ) . rv ( ) return perchrom_mutect
2,868
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/mutect.py#L60-L93
[ "def", "add_server", "(", "self", ",", "server", ")", ":", "if", "not", "isinstance", "(", "server", ",", "WBEMServer", ")", ":", "raise", "TypeError", "(", "\"Server argument of add_server() must be a \"", "\"WBEMServer object\"", ")", "server_id", "=", "server", ".", "url", "if", "server_id", "in", "self", ".", "_servers", ":", "raise", "ValueError", "(", "_format", "(", "\"WBEM server already known by listener: {0!A}\"", ",", "server_id", ")", ")", "# Create dictionary entries for this server", "self", ".", "_servers", "[", "server_id", "]", "=", "server", "self", ".", "_owned_subscriptions", "[", "server_id", "]", "=", "[", "]", "self", ".", "_owned_filters", "[", "server_id", "]", "=", "[", "]", "self", ".", "_owned_destinations", "[", "server_id", "]", "=", "[", "]", "# Recover any owned destination, filter, and subscription instances", "# that exist on this server", "this_host", "=", "getfqdn", "(", ")", "dest_name_pattern", "=", "re", ".", "compile", "(", "_format", "(", "r'^pywbemdestination:owned:{0}:{1}:[^:]*$'", ",", "this_host", ",", "self", ".", "_subscription_manager_id", ")", ")", "dest_insts", "=", "server", ".", "conn", ".", "EnumerateInstances", "(", "DESTINATION_CLASSNAME", ",", "namespace", "=", "server", ".", "interop_ns", ")", "for", "inst", "in", "dest_insts", ":", "if", "re", ".", "match", "(", "dest_name_pattern", ",", "inst", ".", "path", ".", "keybindings", "[", "'Name'", "]", ")", "and", "inst", ".", "path", ".", "keybindings", "[", "'SystemName'", "]", "==", "this_host", ":", "self", ".", "_owned_destinations", "[", "server_id", "]", ".", "append", "(", "inst", ")", "filter_name_pattern", "=", "re", ".", "compile", "(", "_format", "(", "r'^pywbemfilter:owned:{0}:{1}:[^:]*:[^:]*$'", ",", "this_host", ",", "self", ".", "_subscription_manager_id", ")", ")", "filter_insts", "=", "server", ".", "conn", ".", "EnumerateInstances", "(", "FILTER_CLASSNAME", ",", "namespace", "=", "server", ".", "interop_ns", ")", "for", "inst", "in", 
"filter_insts", ":", "if", "re", ".", "match", "(", "filter_name_pattern", ",", "inst", ".", "path", ".", "keybindings", "[", "'Name'", "]", ")", "and", "inst", ".", "path", ".", "keybindings", "[", "'SystemName'", "]", "==", "this_host", ":", "self", ".", "_owned_filters", "[", "server_id", "]", ".", "append", "(", "inst", ")", "sub_insts", "=", "server", ".", "conn", ".", "EnumerateInstances", "(", "SUBSCRIPTION_CLASSNAME", ",", "namespace", "=", "server", ".", "interop_ns", ")", "owned_filter_paths", "=", "[", "inst", ".", "path", "for", "inst", "in", "self", ".", "_owned_filters", "[", "server_id", "]", "]", "owned_destination_paths", "=", "[", "inst", ".", "path", "for", "inst", "in", "self", ".", "_owned_destinations", "[", "server_id", "]", "]", "for", "inst", "in", "sub_insts", ":", "if", "inst", ".", "path", ".", "keybindings", "[", "'Filter'", "]", "in", "owned_filter_paths", "or", "inst", ".", "path", ".", "keybindings", "[", "'Handler'", "]", "in", "owned_destination_paths", ":", "self", ".", "_owned_subscriptions", "[", "server_id", "]", ".", "append", "(", "inst", ")", "return", "server_id" ]
Run MuTect call on a single chromosome in the input bams .
def run_mutect_perchrom ( job , tumor_bam , normal_bam , univ_options , mutect_options , chrom ) : work_dir = os . getcwd ( ) input_files = { 'tumor.bam' : tumor_bam [ 'tumor_dna_fix_pg_sorted.bam' ] , 'tumor.bam.bai' : tumor_bam [ 'tumor_dna_fix_pg_sorted.bam.bai' ] , 'normal.bam' : normal_bam [ 'normal_dna_fix_pg_sorted.bam' ] , 'normal.bam.bai' : normal_bam [ 'normal_dna_fix_pg_sorted.bam.bai' ] , 'genome.fa.tar.gz' : mutect_options [ 'genome_fasta' ] , 'genome.fa.fai.tar.gz' : mutect_options [ 'genome_fai' ] , 'genome.dict.tar.gz' : mutect_options [ 'genome_dict' ] , 'cosmic.vcf.tar.gz' : mutect_options [ 'cosmic_vcf' ] , 'cosmic.vcf.idx.tar.gz' : mutect_options [ 'cosmic_idx' ] , 'dbsnp.vcf.gz' : mutect_options [ 'dbsnp_vcf' ] , 'dbsnp.vcf.idx.tar.gz' : mutect_options [ 'dbsnp_idx' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) # dbsnp.vcf should be bgzipped, but all others should be tar.gz'd input_files [ 'dbsnp.vcf' ] = gunzip ( input_files [ 'dbsnp.vcf.gz' ] ) for key in ( 'genome.fa' , 'genome.fa.fai' , 'genome.dict' , 'cosmic.vcf' , 'cosmic.vcf.idx' , 'dbsnp.vcf.idx' ) : input_files [ key ] = untargz ( input_files [ key + '.tar.gz' ] , work_dir ) input_files = { key : docker_path ( path ) for key , path in input_files . items ( ) } mutout = '' . join ( [ work_dir , '/' , chrom , '.out' ] ) mutvcf = '' . 
join ( [ work_dir , '/' , chrom , '.vcf' ] ) parameters = [ '-R' , input_files [ 'genome.fa' ] , '--cosmic' , input_files [ 'cosmic.vcf' ] , '--dbsnp' , input_files [ 'dbsnp.vcf' ] , '--input_file:normal' , input_files [ 'normal.bam' ] , '--input_file:tumor' , input_files [ 'tumor.bam' ] , # '--tumor_lod', str(10), # '--initial_tumor_lod', str(4.0), '-L' , chrom , '--out' , docker_path ( mutout ) , '--vcf' , docker_path ( mutvcf ) ] java_xmx = mutect_options [ 'java_Xmx' ] if mutect_options [ 'java_Xmx' ] else univ_options [ 'java_Xmx' ] docker_call ( tool = 'mutect' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , java_xmx = java_xmx , tool_version = mutect_options [ 'version' ] ) output_file = job . fileStore . writeGlobalFile ( mutvcf ) export_results ( job , output_file , mutvcf , univ_options , subfolder = 'mutations/mutect' ) job . fileStore . logToMaster ( 'Ran MuTect on %s:%s successfully' % ( univ_options [ 'patient' ] , chrom ) ) return output_file
2,869
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/mutect.py#L96-L150
[ "def", "get_reveal_set", "(", ")", ":", "file_path", "=", "get_package_path", "(", ")", "+", "\"/twitter/res/topics/story_set.txt\"", "story_topics", "=", "get_topic_set", "(", "file_path", ")", "file_path", "=", "get_package_path", "(", ")", "+", "\"/twitter/res/topics/theme_set.txt\"", "theme_topics", "=", "get_topic_set", "(", "file_path", ")", "file_path", "=", "get_package_path", "(", ")", "+", "\"/twitter/res/topics/attribute_set.txt\"", "attribute_topics", "=", "get_topic_set", "(", "file_path", ")", "file_path", "=", "get_package_path", "(", ")", "+", "\"/twitter/res/topics/stance_set.txt\"", "stance_topics", "=", "get_topic_set", "(", "file_path", ")", "file_path", "=", "get_package_path", "(", ")", "+", "\"/twitter/res/topics/geographical_set.txt\"", "geographical_topics", "=", "get_topic_set", "(", "file_path", ")", "topics", "=", "story_topics", "|", "theme_topics", "|", "attribute_topics", "|", "stance_topics", "|", "geographical_topics", "return", "topics" ]
Process the MuTect vcf for accepted calls .
def process_mutect_vcf ( job , mutect_vcf , work_dir , univ_options ) : mutect_vcf = job . fileStore . readGlobalFile ( mutect_vcf ) with open ( mutect_vcf , 'r' ) as infile , open ( mutect_vcf + 'mutect_parsed.tmp' , 'w' ) as outfile : for line in infile : line = line . strip ( ) if line . startswith ( '#' ) : print ( line , file = outfile ) continue line = line . split ( '\t' ) if line [ 6 ] != 'REJECT' : print ( '\t' . join ( line ) , file = outfile ) return outfile . name
2,870
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/mutect.py#L153-L174
[ "def", "active_repositories", "(", "doc", ")", ":", "if", "doc", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "for", "repository_id", ",", "repo", "in", "doc", ".", "get", "(", "'repositories'", ",", "{", "}", ")", ".", "items", "(", ")", ":", "if", "repo", ".", "get", "(", "'state'", ")", "!=", "'deactivated'", ":", "repo", "[", "'id'", "]", "=", "repository_id", "repo", "[", "'organisation_id'", "]", "=", "doc", "[", "'_id'", "]", "yield", "repository_id", ",", "repo" ]
Returns universe the client is connected to . See Universe .
def get_universe ( self , as_str = False ) : result = self . _iface . get_connected_universe ( ) if as_str : return Universe . get_alias ( result ) return result
2,871
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/libsteam/resources/utils.py#L136-L147
[ "def", "call_after", "(", "lag", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "wrapper", ".", "timer", ".", "cancel", "(", ")", "# Debounce", "wrapper", ".", "timer", "=", "threading", ".", "Timer", "(", "lag", ",", "func", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ")", "wrapper", ".", "timer", ".", "start", "(", ")", "wrapper", ".", "timer", "=", "threading", ".", "Timer", "(", "0", ",", "lambda", ":", "None", ")", "# timer.cancel now exists", "return", "wrapper", "return", "decorator" ]
lista modulos con los distintos niveles a logear y su nivel de debug
def EXTRA_LOGGING ( self ) : input_text = get ( 'EXTRA_LOGGING' , '' ) modules = input_text . split ( ',' ) if input_text : modules = input_text . split ( ',' ) modules = [ x . split ( ':' ) for x in modules ] else : modules = [ ] return modules
2,872
https://github.com/APSL/django-kaio/blob/b74b109bcfba31d973723bc419e2c95d190b80b7/kaio/mixins/logs.py#L26-L45
[ "def", "_IsCronJobFailing", "(", "self", ",", "cron_job", ")", ":", "status", "=", "cron_job", ".", "Get", "(", "cron_job", ".", "Schema", ".", "LAST_RUN_STATUS", ")", "if", "status", "is", "None", ":", "return", "False", "return", "status", ".", "status", "!=", "rdf_cronjobs", ".", "CronJobRunStatus", ".", "Status", ".", "OK" ]
Gets a NepDate object from gregorian calendar date
def from_ad_date ( cls , date ) : functions . check_valid_ad_range ( date ) days = values . START_EN_DATE - date # Add the required number of days to the start nepali date start_date = NepDate ( values . START_NP_YEAR , 1 , 1 ) # No need to update as addition already calls update return start_date + ( date - values . START_EN_DATE )
2,873
https://github.com/nepalicalendar/nepalicalendar-py/blob/a589c28b8e085049f30a7287753476b59eca6f50/nepalicalendar/nepdate.py#L207-L215
[ "def", "_unregister_bundle_factories", "(", "self", ",", "bundle", ")", ":", "# type: (Bundle) -> None", "with", "self", ".", "__factories_lock", ":", "# Find out which factories must be removed", "to_remove", "=", "[", "factory_name", "for", "factory_name", "in", "self", ".", "__factories", "if", "self", ".", "get_factory_bundle", "(", "factory_name", ")", "is", "bundle", "]", "# Remove all of them", "for", "factory_name", "in", "to_remove", ":", "try", ":", "self", ".", "unregister_factory", "(", "factory_name", ")", "except", "ValueError", "as", "ex", ":", "_logger", ".", "warning", "(", "\"Error unregistering factory '%s': %s\"", ",", "factory_name", ",", "ex", ")" ]
Create and update an NepDate object for bikram sambat date
def from_bs_date ( cls , year , month , day ) : return NepDate ( year , month , day ) . update ( )
2,874
https://github.com/nepalicalendar/nepalicalendar-py/blob/a589c28b8e085049f30a7287753476b59eca6f50/nepalicalendar/nepdate.py#L218-L220
[ "def", "remove", "(", "self", ",", "experiment", ")", ":", "try", ":", "project_path", "=", "self", ".", "projects", "[", "self", "[", "experiment", "]", "[", "'project'", "]", "]", "[", "'root'", "]", "except", "KeyError", ":", "return", "config_path", "=", "osp", ".", "join", "(", "project_path", ",", "'.project'", ",", "experiment", "+", "'.yml'", ")", "for", "f", "in", "[", "config_path", ",", "config_path", "+", "'~'", ",", "config_path", "+", "'.lck'", "]", ":", "if", "os", ".", "path", ".", "exists", "(", "f", ")", ":", "os", ".", "remove", "(", "f", ")", "del", "self", "[", "experiment", "]" ]
Returns the events today
def events_list ( self ) : evt = [ ] evt . extend ( events . NEPALI_EVENTS [ self . month , self . day ] ) evt . extend ( events . ENGLISH_EVENTS [ self . en_date . month , self . en_date . day ] ) return evt
2,875
https://github.com/nepalicalendar/nepalicalendar-py/blob/a589c28b8e085049f30a7287753476b59eca6f50/nepalicalendar/nepdate.py#L269-L274
[ "def", "setOptimizedForIPTV", "(", "self", ",", "status", ",", "wifiInterfaceId", "=", "1", ",", "timeout", "=", "1", ")", ":", "namespace", "=", "Fritz", ".", "getServiceType", "(", "\"setOptimizedForIPTV\"", ")", "+", "str", "(", "wifiInterfaceId", ")", "uri", "=", "self", ".", "getControlURL", "(", "namespace", ")", "if", "status", ":", "setStatus", "=", "1", "else", ":", "setStatus", "=", "0", "arguments", "=", "{", "\"timeout\"", ":", "timeout", ",", "\"NewX_AVM-DE_IPTVoptimize\"", ":", "setStatus", "}", "self", ".", "execute", "(", "uri", ",", "namespace", ",", "\"X_AVM-DE_SetIPTVOptimized\"", ",", "*", "*", "arguments", ")" ]
Updates information about the NepDate
def update ( self ) : functions . check_valid_bs_range ( self ) # Here's a trick to find the gregorian date: # We find the number of days from earliest nepali date to the current # day. We then add the number of days to the earliest english date self . en_date = values . START_EN_DATE + ( self - NepDate ( values . START_NP_YEAR , 1 , 1 ) ) return self
2,876
https://github.com/nepalicalendar/nepalicalendar-py/blob/a589c28b8e085049f30a7287753476b59eca6f50/nepalicalendar/nepdate.py#L291-L305
[ "def", "_get_flags", "(", "flags", ")", ":", "if", "isinstance", "(", "flags", ",", "six", ".", "string_types", ")", ":", "flags", "=", "[", "flags", "]", "if", "isinstance", "(", "flags", ",", "Iterable", ")", "and", "not", "isinstance", "(", "flags", ",", "Mapping", ")", ":", "_flags_acc", "=", "[", "]", "for", "flag", "in", "flags", ":", "_flag", "=", "getattr", "(", "re", ",", "six", ".", "text_type", "(", "flag", ")", ".", "upper", "(", ")", ")", "if", "not", "isinstance", "(", "_flag", ",", "six", ".", "integer_types", ")", ":", "raise", "SaltInvocationError", "(", "'Invalid re flag given: {0}'", ".", "format", "(", "flag", ")", ")", "_flags_acc", ".", "append", "(", "_flag", ")", "return", "reduce", "(", "operator", ".", "__or__", ",", "_flags_acc", ")", "elif", "isinstance", "(", "flags", ",", "six", ".", "integer_types", ")", ":", "return", "flags", "else", ":", "raise", "SaltInvocationError", "(", "'Invalid re flags: \"{0}\", must be given either as a single flag '", "'string, a list of strings, or as an integer'", ".", "format", "(", "flags", ")", ")" ]
Downloads a supplied URL that points to an unencrypted unprotected file on Amazon S3 . The file is downloaded and a subsequently written to the jobstore and the return value is a the path to the file in the jobstore .
def get_file_from_s3 ( job , s3_url , encryption_key = None , write_to_jobstore = True ) : work_dir = job . fileStore . getLocalTempDir ( ) filename = '/' . join ( [ work_dir , os . path . basename ( s3_url ) ] ) # This is common to encrypted and unencrypted downloads download_call = [ 'curl' , '-fs' , '--retry' , '5' ] # If an encryption key was provided, use it to create teh headers that need to be injected into # the curl script and append to the call if encryption_key : key = generate_unique_key ( encryption_key , s3_url ) encoded_key = base64 . b64encode ( key ) encoded_key_md5 = base64 . b64encode ( hashlib . md5 ( key ) . digest ( ) ) h1 = 'x-amz-server-side-encryption-customer-algorithm:AES256' h2 = 'x-amz-server-side-encryption-customer-key:{}' . format ( encoded_key ) h3 = 'x-amz-server-side-encryption-customer-key-md5:{}' . format ( encoded_key_md5 ) download_call . extend ( [ '-H' , h1 , '-H' , h2 , '-H' , h3 ] ) # This is also common to both types of downloads download_call . extend ( [ s3_url , '-o' , filename ] ) try : subprocess . check_call ( download_call ) except subprocess . CalledProcessError : raise RuntimeError ( 'Curl returned a non-zero exit status processing %s. Do you' % s3_url + 'have premssions to access the file?' ) except OSError : raise RuntimeError ( 'Failed to find "curl". Install via "apt-get install curl"' ) assert os . path . exists ( filename ) if write_to_jobstore : filename = job . fileStore . writeGlobalFile ( filename ) return filename
2,877
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/attic/precision_immuno.py#L2159-L2191
[ "def", "timing", "(", "name", ",", "delta", ",", "rate", "=", "1", ",", "tags", "=", "None", ")", ":", "return", "client", "(", ")", ".", "timing", "(", "name", ",", "delta", ",", "rate", "=", "rate", ",", "tags", "=", "tags", ")" ]
Helper function for spatial lookups filters .
def filter_geometry ( queryset , * * filters ) : fieldname = geo_field ( queryset ) . name query = { '%s__%s' % ( fieldname , k ) : v for k , v in filters . items ( ) } return queryset . filter ( * * query )
2,878
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L16-L24
[ "def", "_get_purecn_dx_files", "(", "paired", ",", "out", ")", ":", "out_base", "=", "\"%s-dx\"", "%", "utils", ".", "splitext_plus", "(", "out", "[", "\"rds\"", "]", ")", "[", "0", "]", "all_files", "=", "[", "]", "for", "key", ",", "ext", "in", "[", "[", "(", "\"mutation_burden\"", ",", ")", ",", "\"_mutation_burden.csv\"", "]", ",", "[", "(", "\"plot\"", ",", "\"signatures\"", ")", ",", "\"_signatures.pdf\"", "]", ",", "[", "(", "\"signatures\"", ",", ")", ",", "\"_signatures.csv\"", "]", "]", ":", "cur_file", "=", "\"%s%s\"", "%", "(", "out_base", ",", "ext", ")", "out", "=", "tz", ".", "update_in", "(", "out", ",", "key", ",", "lambda", "x", ":", "cur_file", ")", "all_files", ".", "append", "(", "os", ".", "path", ".", "basename", "(", "cur_file", ")", ")", "return", "out_base", ",", "out", ",", "all_files" ]
Returns the GeometryField for a django or spillway GeoQuerySet .
def geo_field ( queryset ) : for field in queryset . model . _meta . fields : if isinstance ( field , models . GeometryField ) : return field raise exceptions . FieldDoesNotExist ( 'No GeometryField found' )
2,879
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L26-L31
[ "def", "_get_purecn_dx_files", "(", "paired", ",", "out", ")", ":", "out_base", "=", "\"%s-dx\"", "%", "utils", ".", "splitext_plus", "(", "out", "[", "\"rds\"", "]", ")", "[", "0", "]", "all_files", "=", "[", "]", "for", "key", ",", "ext", "in", "[", "[", "(", "\"mutation_burden\"", ",", ")", ",", "\"_mutation_burden.csv\"", "]", ",", "[", "(", "\"plot\"", ",", "\"signatures\"", ")", ",", "\"_signatures.pdf\"", "]", ",", "[", "(", "\"signatures\"", ",", ")", ",", "\"_signatures.csv\"", "]", "]", ":", "cur_file", "=", "\"%s%s\"", "%", "(", "out_base", ",", "ext", ")", "out", "=", "tz", ".", "update_in", "(", "out", ",", "key", ",", "lambda", "x", ":", "cur_file", ")", "all_files", ".", "append", "(", "os", ".", "path", ".", "basename", "(", "cur_file", ")", ")", "return", "out_base", ",", "out", ",", "all_files" ]
Returns the GeoQuerySet spatial reference identifier .
def get_srid ( queryset ) : try : srid = list ( six . viewvalues ( queryset . query . annotations ) ) [ 0 ] . srid except ( AttributeError , IndexError ) : srid = None return srid or geo_field ( queryset ) . srid
2,880
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L33-L39
[ "def", "GetEntries", "(", "self", ",", "parser_mediator", ",", "data", "=", "None", ",", "*", "*", "unused_kwargs", ")", ":", "# Walk through one of the torrent keys to ensure it's from a valid file.", "for", "key", ",", "value", "in", "iter", "(", "data", ".", "items", "(", ")", ")", ":", "if", "not", "'.torrent'", "in", "key", ":", "continue", "caption", "=", "value", ".", "get", "(", "'caption'", ")", "path", "=", "value", ".", "get", "(", "'path'", ")", "seedtime", "=", "value", ".", "get", "(", "'seedtime'", ")", "if", "not", "caption", "or", "not", "path", "or", "seedtime", "<", "0", ":", "raise", "errors", ".", "WrongBencodePlugin", "(", "self", ".", "NAME", ")", "for", "torrent", ",", "value", "in", "iter", "(", "data", ".", "items", "(", ")", ")", ":", "if", "not", "'.torrent'", "in", "torrent", ":", "continue", "event_data", "=", "UTorrentEventData", "(", ")", "event_data", ".", "caption", "=", "value", ".", "get", "(", "'caption'", ",", "None", ")", "event_data", ".", "path", "=", "value", ".", "get", "(", "'path'", ",", "None", ")", "# Convert seconds to minutes.", "seedtime", "=", "value", ".", "get", "(", "'seedtime'", ",", "None", ")", "event_data", ".", "seedtime", ",", "_", "=", "divmod", "(", "seedtime", ",", "60", ")", "# Create timeline events based on extracted values.", "for", "event_key", ",", "event_value", "in", "iter", "(", "value", ".", "items", "(", ")", ")", ":", "if", "event_key", "==", "'added_on'", ":", "date_time", "=", "dfdatetime_posix_time", ".", "PosixTime", "(", "timestamp", "=", "event_value", ")", "event", "=", "time_events", ".", "DateTimeValuesEvent", "(", "date_time", ",", "definitions", ".", "TIME_DESCRIPTION_ADDED", ")", "parser_mediator", ".", "ProduceEventWithEventData", "(", "event", ",", "event_data", ")", "elif", "event_key", "==", "'completed_on'", ":", "date_time", "=", "dfdatetime_posix_time", ".", "PosixTime", "(", "timestamp", "=", "event_value", ")", "event", "=", "time_events", ".", "DateTimeValuesEvent", 
"(", "date_time", ",", "definitions", ".", "TIME_DESCRIPTION_FILE_DOWNLOADED", ")", "parser_mediator", ".", "ProduceEventWithEventData", "(", "event", ",", "event_data", ")", "elif", "event_key", "==", "'modtimes'", ":", "for", "modtime", "in", "event_value", ":", "# Some values are stored as 0, skip those.", "if", "not", "modtime", ":", "continue", "date_time", "=", "dfdatetime_posix_time", ".", "PosixTime", "(", "timestamp", "=", "modtime", ")", "event", "=", "time_events", ".", "DateTimeValuesEvent", "(", "date_time", ",", "definitions", ".", "TIME_DESCRIPTION_MODIFICATION", ")", "parser_mediator", ".", "ProduceEventWithEventData", "(", "event", ",", "event_data", ")" ]
Returns a 1D array with higher dimensions aggregated using stat fn .
def agg_dims ( arr , stat ) : axis = None if arr . ndim > 2 : axis = 1 arr = arr . reshape ( arr . shape [ 0 ] , - 1 ) module = np . ma if hasattr ( arr , 'mask' ) else np return getattr ( module , stat ) ( arr , axis )
2,881
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L41-L53
[ "def", "cli", "(", "env", ",", "identifier", ",", "crt", ",", "csr", ",", "icc", ",", "key", ",", "notes", ")", ":", "template", "=", "{", "'id'", ":", "identifier", "}", "if", "crt", ":", "template", "[", "'certificate'", "]", "=", "open", "(", "crt", ")", ".", "read", "(", ")", "if", "key", ":", "template", "[", "'privateKey'", "]", "=", "open", "(", "key", ")", ".", "read", "(", ")", "if", "csr", ":", "template", "[", "'certificateSigningRequest'", "]", "=", "open", "(", "csr", ")", ".", "read", "(", ")", "if", "icc", ":", "template", "[", "'intermediateCertificate'", "]", "=", "open", "(", "icc", ")", ".", "read", "(", ")", "if", "notes", ":", "template", "[", "'notes'", "]", "=", "notes", "manager", "=", "SoftLayer", ".", "SSLManager", "(", "env", ".", "client", ")", "manager", ".", "edit_certificate", "(", "template", ")" ]
Returns the GeoQuerySet extent as a 4 - tuple .
def extent ( self , srid = None ) : expr = self . geo_field . name if srid : expr = geofn . Transform ( expr , srid ) expr = models . Extent ( expr ) clone = self . all ( ) name , val = clone . aggregate ( expr ) . popitem ( ) return val
2,882
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L87-L99
[ "def", "_get_purecn_dx_files", "(", "paired", ",", "out", ")", ":", "out_base", "=", "\"%s-dx\"", "%", "utils", ".", "splitext_plus", "(", "out", "[", "\"rds\"", "]", ")", "[", "0", "]", "all_files", "=", "[", "]", "for", "key", ",", "ext", "in", "[", "[", "(", "\"mutation_burden\"", ",", ")", ",", "\"_mutation_burden.csv\"", "]", ",", "[", "(", "\"plot\"", ",", "\"signatures\"", ")", ",", "\"_signatures.pdf\"", "]", ",", "[", "(", "\"signatures\"", ",", ")", ",", "\"_signatures.csv\"", "]", "]", ":", "cur_file", "=", "\"%s%s\"", "%", "(", "out_base", ",", "ext", ")", "out", "=", "tz", ".", "update_in", "(", "out", ",", "key", ",", "lambda", "x", ":", "cur_file", ")", "all_files", ".", "append", "(", "os", ".", "path", ".", "basename", "(", "cur_file", ")", ")", "return", "out_base", ",", "out", ",", "all_files" ]
Returns tranlated and scaled geometries suitable for Mapbox vector tiles .
def pbf ( self , bbox , geo_col = None , scale = 4096 ) : col = geo_col or self . geo_field . name w , s , e , n = bbox . extent trans = self . _trans_scale ( col , - w , - s , scale / ( e - w ) , scale / ( n - s ) ) g = AsText ( trans ) return self . annotate ( pbf = g )
2,883
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L110-L120
[ "def", "prt_summary_anno2ev", "(", "self", ",", "prt", "=", "sys", ".", "stdout", ")", ":", "prt", ".", "write", "(", "'**NOTE: No evidence codes in associations: {F}\\n'", ".", "format", "(", "F", "=", "self", ".", "filename", ")", ")" ]
Returns a GeoQuerySet intersecting a tile boundary .
def tile ( self , bbox , z = 0 , format = None , clip = True ) : # Tile grid uses 3857, but GeoJSON coordinates should be in 4326. tile_srid = 3857 bbox = getattr ( bbox , 'geos' , bbox ) clone = filter_geometry ( self , intersects = bbox ) field = clone . geo_field srid = field . srid sql = field . name try : tilew = self . tilewidths [ z ] except IndexError : tilew = self . tilewidths [ - 1 ] if bbox . srid != srid : bbox = bbox . transform ( srid , clone = True ) # Estimate tile width in degrees instead of meters. if bbox . srs . geographic : p = geos . Point ( tilew , tilew , srid = tile_srid ) p . transform ( srid ) tilew = p . x if clip : bufbox = bbox . buffer ( tilew ) sql = geofn . Intersection ( sql , bufbox . envelope ) sql = SimplifyPreserveTopology ( sql , tilew ) if format == 'pbf' : return clone . pbf ( bbox , geo_col = sql ) sql = geofn . Transform ( sql , 4326 ) return clone . annotate ( * * { format : sql } )
2,884
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L122-L157
[ "def", "sync_auth", "(", "self", ",", "vault_client", ",", "resources", ")", ":", "for", "auth", "in", "self", ".", "auths", "(", ")", ":", "auth", ".", "sync", "(", "vault_client", ")", "auth_resources", "=", "[", "x", "for", "x", "in", "resources", "if", "isinstance", "(", "x", ",", "(", "LDAP", ",", "UserPass", ")", ")", "]", "for", "resource", "in", "auth_resources", ":", "resource", ".", "sync", "(", "vault_client", ")", "return", "[", "x", "for", "x", "in", "resources", "if", "not", "isinstance", "(", "x", ",", "(", "LDAP", ",", "UserPass", ",", "AuditLog", ")", ")", "]" ]
Returns a list of ndarrays .
def arrays ( self , field_name = None ) : fieldname = field_name or self . raster_field . name arrays = [ ] for obj in self : arr = getattr ( obj , fieldname ) if isinstance ( arr , np . ndarray ) : arrays . append ( arr ) else : arrays . append ( obj . array ( ) ) return arrays
2,885
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L161-L175
[ "def", "parseReaderConfig", "(", "self", ",", "confdict", ")", ":", "logger", ".", "debug", "(", "'parseReaderConfig input: %s'", ",", "confdict", ")", "conf", "=", "{", "}", "for", "k", ",", "v", "in", "confdict", ".", "items", "(", ")", ":", "if", "not", "k", ".", "startswith", "(", "'Parameter'", ")", ":", "continue", "ty", "=", "v", "[", "'Type'", "]", "data", "=", "v", "[", "'Data'", "]", "vendor", "=", "None", "subtype", "=", "None", "try", ":", "vendor", ",", "subtype", "=", "v", "[", "'Vendor'", "]", ",", "v", "[", "'Subtype'", "]", "except", "KeyError", ":", "pass", "if", "ty", "==", "1023", ":", "if", "vendor", "==", "25882", "and", "subtype", "==", "37", ":", "tempc", "=", "struct", ".", "unpack", "(", "'!H'", ",", "data", ")", "[", "0", "]", "conf", ".", "update", "(", "temperature", "=", "tempc", ")", "else", ":", "conf", "[", "ty", "]", "=", "data", "return", "conf" ]
Returns list of ndarrays averaged to a given number of periods .
def aggregate_periods ( self , periods ) : try : fieldname = self . raster_field . name except TypeError : raise exceptions . FieldDoesNotExist ( 'Raster field not found' ) arrays = self . arrays ( fieldname ) arr = arrays [ 0 ] if len ( arrays ) > 1 : if getattr ( arr , 'ndim' , 0 ) > 2 : arrays = np . vstack ( arrays ) fill = getattr ( arr , 'fill_value' , None ) arr = np . ma . masked_values ( arrays , fill , copy = False ) # Try to reshape using equal sizes first and fall back to unequal # splits. try : means = arr . reshape ( ( periods , - 1 ) ) . mean ( axis = 1 ) except ValueError : means = np . array ( [ a . mean ( ) for a in np . array_split ( arr , periods ) ] ) obj = self [ 0 ] setattr ( obj , fieldname , means ) return [ obj ]
2,886
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L177-L202
[ "def", "fetch_and_create_image", "(", "self", ",", "url", ",", "image_title", ")", ":", "context", "=", "{", "\"file_url\"", ":", "url", ",", "\"foreign_title\"", ":", "image_title", ",", "}", "try", ":", "image_file", "=", "requests", ".", "get", "(", "url", ")", "local_image", "=", "Image", "(", "title", "=", "image_title", ",", "file", "=", "ImageFile", "(", "BytesIO", "(", "image_file", ".", "content", ")", ",", "name", "=", "image_title", ")", ")", "local_image", ".", "save", "(", ")", "return", "(", "local_image", ",", "context", ")", "except", "Exception", "as", "e", ":", "context", ".", "update", "(", "{", "\"exception\"", ":", "e", ",", "}", ")", "raise", "ImageCreationFailed", "(", "context", ",", "None", ")" ]
Returns the raster FileField instance on the model .
def raster_field ( self ) : for field in self . model . _meta . fields : if isinstance ( field , models . FileField ) : return field return False
2,887
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L218-L223
[ "def", "remove_stale_javascripts", "(", "portal", ")", ":", "logger", ".", "info", "(", "\"Removing stale javascripts ...\"", ")", "for", "js", "in", "JAVASCRIPTS_TO_REMOVE", ":", "logger", ".", "info", "(", "\"Unregistering JS %s\"", "%", "js", ")", "portal", ".", "portal_javascripts", ".", "unregisterResource", "(", "js", ")" ]
Returns a . zip archive of selected rasters .
def zipfiles ( self , path = None , arcdirname = 'data' ) : if path : fp = open ( path , 'w+b' ) else : prefix = '%s-' % arcdirname fp = tempfile . NamedTemporaryFile ( prefix = prefix , suffix = '.zip' ) with zipfile . ZipFile ( fp , mode = 'w' ) as zf : for obj in self : img = obj . image arcname = os . path . join ( arcdirname , os . path . basename ( img . name ) ) try : zf . write ( img . path , arcname = arcname ) except OSError : img . seek ( 0 ) zf . writestr ( arcname , img . read ( ) ) img . close ( ) fp . seek ( 0 ) zobj = self . model ( image = fp ) return [ zobj ]
2,888
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/query.py#L265-L284
[ "def", "_apply_cen_to_throats", "(", "self", ",", "p_cen", ",", "t_cen", ",", "t_norm", ",", "men_cen", ")", ":", "v", "=", "p_cen", "-", "t_cen", "sign", "=", "np", ".", "sign", "(", "np", ".", "sum", "(", "v", "*", "t_norm", ",", "axis", "=", "1", ")", ")", "c3", "=", "np", ".", "vstack", "(", "(", "men_cen", "*", "sign", ",", "men_cen", "*", "sign", ",", "men_cen", "*", "sign", ")", ")", ".", "T", "coords", "=", "t_cen", "+", "c3", "*", "t_norm", "return", "coords" ]
Initializes Steam API library .
def init ( self , app_id = None ) : self . set_app_id ( app_id ) err_msg = ( 'Unable to initialize. Check Steam client is running ' 'and Steam application ID is defined in steam_appid.txt or passed to Api.' ) if self . _lib . steam_init ( ) : try : _set_client ( self . _lib . Client ( ) ) self . utils = Utils ( ) self . current_user = CurrentUser ( ) self . friends = Friends ( ) self . groups = Groups ( ) self . apps = Applications ( ) self . overlay = Overlay ( ) self . screenshots = Screenshots ( ) except Exception as e : raise SteamApiStartupError ( '%s:\n%s' % ( err_msg , e ) ) else : raise SteamApiStartupError ( err_msg )
2,889
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/libsteam/resources/main.py#L125-L155
[ "def", "grep_log", "(", "self", ",", "expr", ",", "filename", "=", "'system.log'", ",", "from_mark", "=", "None", ")", ":", "matchings", "=", "[", "]", "pattern", "=", "re", ".", "compile", "(", "expr", ")", "with", "open", "(", "os", ".", "path", ".", "join", "(", "self", ".", "get_path", "(", ")", ",", "'logs'", ",", "filename", ")", ")", "as", "f", ":", "if", "from_mark", ":", "f", ".", "seek", "(", "from_mark", ")", "for", "line", "in", "f", ":", "m", "=", "pattern", ".", "search", "(", "line", ")", "if", "m", ":", "matchings", ".", "append", "(", "(", "line", ",", "m", ")", ")", "return", "matchings" ]
Download a dict of files to the given directory and modify the path to a docker - friendly one if requested .
def get_files_from_filestore ( job , files , work_dir , docker = False ) : for name in files . keys ( ) : outfile = job . fileStore . readGlobalFile ( files [ name ] , '/' . join ( [ work_dir , name ] ) ) # If the files will be sent to docker, we will mount work_dir to the container as /data and # we want the /data prefixed path to the file if docker : files [ name ] = docker_path ( outfile ) else : files [ name ] = outfile return files
2,890
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L45-L64
[ "def", "stop", "(", "self", ")", ":", "with", "self", ".", "__receiver_thread_exit_condition", ":", "while", "not", "self", ".", "__receiver_thread_exited", "and", "self", ".", "is_connected", "(", ")", ":", "self", ".", "__receiver_thread_exit_condition", ".", "wait", "(", ")" ]
Gunzips the input file to the same directory
def gunzip ( input_gzip_file , block_size = 1024 ) : assert os . path . splitext ( input_gzip_file ) [ 1 ] == '.gz' assert is_gzipfile ( input_gzip_file ) with gzip . open ( input_gzip_file ) as infile : with open ( os . path . splitext ( input_gzip_file ) [ 0 ] , 'w' ) as outfile : while True : block = infile . read ( block_size ) if block == '' : break else : outfile . write ( block ) return outfile . name
2,891
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L163-L181
[ "def", "get_metric", "(", "self", ",", "slug", ")", ":", "results", "=", "OrderedDict", "(", ")", "granularities", "=", "self", ".", "_granularities", "(", ")", "keys", "=", "self", ".", "_build_keys", "(", "slug", ")", "for", "granularity", ",", "key", "in", "zip", "(", "granularities", ",", "keys", ")", ":", "results", "[", "granularity", "]", "=", "self", ".", "r", ".", "get", "(", "key", ")", "return", "results" ]
Attempt to ascertain the gzip status of a file based on the magic signatures of the file .
def is_gzipfile ( filename ) : assert os . path . exists ( filename ) , 'Input {} does not ' . format ( filename ) + 'point to a file.' with open ( filename , 'rb' ) as in_f : start_of_file = in_f . read ( 3 ) if start_of_file == '\x1f\x8b\x08' : return True else : return False
2,892
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L184-L203
[ "def", "get_experiment_from_key", "(", "self", ",", "experiment_key", ")", ":", "experiment", "=", "self", ".", "experiment_key_map", ".", "get", "(", "experiment_key", ")", "if", "experiment", ":", "return", "experiment", "self", ".", "logger", ".", "error", "(", "'Experiment key \"%s\" is not in datafile.'", "%", "experiment_key", ")", "self", ".", "error_handler", ".", "handle_error", "(", "exceptions", ".", "InvalidExperimentException", "(", "enums", ".", "Errors", ".", "INVALID_EXPERIMENT_KEY_ERROR", ")", ")", "return", "None" ]
Download a supplied URL that points to a file in the NCBI GDC database . The path to the gdc download token must be provided . The file is downloaded and written to the jobstore if requested .
def get_file_from_gdc ( job , gdc_url , gdc_download_token , write_to_jobstore = True ) : work_dir = job . fileStore . getLocalTempDir ( ) parsed_url = urlparse ( gdc_url ) assert parsed_url . scheme == 'gdc' , 'Unexpected url scheme: %s' % gdc_url file_dir = '/' . join ( [ work_dir , parsed_url . netloc ] ) # This is common to encrypted and unencrypted downloads currwd = os . getcwd ( ) os . chdir ( work_dir ) try : download_call = [ 'gdc-client' , 'download' , '-t' , gdc_download_token , parsed_url . netloc ] subprocess . check_call ( download_call ) finally : os . chdir ( currwd ) assert os . path . exists ( file_dir ) output_files = [ os . path . join ( file_dir , x ) for x in os . listdir ( file_dir ) if not x . endswith ( 'logs' ) ] # NOTE: We only handle vcf and bam+bai if len ( output_files ) == 1 : assert output_files [ 0 ] . endswith ( 'vcf' ) else : if not { os . path . splitext ( x ) [ 1 ] for x in output_files } >= { '.bam' , '.bai' } : raise ParameterError ( 'Can currently only handle pre-indexed GDC bams.' ) # Always [bam, bai] output_files = [ x for x in output_files if x . endswith ( ( 'bam' , 'bai' ) ) ] output_files = sorted ( output_files , key = lambda x : os . path . splitext ( x ) [ 1 ] , reverse = True ) if write_to_jobstore : output_files = [ job . fileStore . writeGlobalFile ( f ) for f in output_files ] return output_files
2,893
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L206-L248
[ "def", "replace", "(", "self", ",", "source", ",", "dest", ")", ":", "for", "i", ",", "broker", "in", "enumerate", "(", "self", ".", "replicas", ")", ":", "if", "broker", "==", "source", ":", "self", ".", "replicas", "[", "i", "]", "=", "dest", "return" ]
Download a supplied URL that points to a file on an http https or ftp server . If the file is found to be an https s3 link then the file is downloaded using get_file_from_s3 . The file is downloaded and written to the jobstore if requested . Encryption arguments are for passing to get_file_from_s3 if required .
def get_file_from_url ( job , any_url , encryption_key = None , per_file_encryption = True , write_to_jobstore = True ) : work_dir = job . fileStore . getLocalTempDir ( ) filename = '/' . join ( [ work_dir , str ( uuid . uuid4 ( ) ) ] ) url = any_url parsed_url = urlparse ( any_url ) try : response = urllib2 . urlopen ( url ) except urllib2 . HTTPError : if parsed_url . netloc . startswith ( ( 's3' , 'S3' ) ) : job . fileStore . logToMaster ( "Detected https link is for an encrypted s3 file." ) return get_file_from_s3 ( job , any_url , encryption_key = encryption_key , per_file_encryption = per_file_encryption , write_to_jobstore = write_to_jobstore ) else : raise else : with open ( filename , 'w' ) as f : f . write ( response . read ( ) ) if write_to_jobstore : filename = job . fileStore . writeGlobalFile ( filename ) return filename
2,894
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L337-L373
[ "def", "add_mismatch", "(", "self", ",", "entity", ",", "*", "traits", ")", ":", "for", "trait", "in", "traits", ":", "self", ".", "index", "[", "trait", "]", ".", "add", "(", "entity", ")" ]
Split an input bam to paired fastqs .
def bam2fastq ( bamfile , univ_options , picard_options ) : work_dir = os . path . split ( bamfile ) [ 0 ] base_name = os . path . split ( os . path . splitext ( bamfile ) [ 0 ] ) [ 1 ] parameters = [ 'SamToFastq' , '' . join ( [ 'I=' , docker_path ( bamfile ) ] ) , '' . join ( [ 'F=/data/' , base_name , '_1.fastq' ] ) , '' . join ( [ 'F2=/data/' , base_name , '_2.fastq' ] ) , '' . join ( [ 'FU=/data/' , base_name , '_UP.fastq' ] ) ] docker_call ( tool = 'picard' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , java_xmx = univ_options [ 'java_Xmx' ] , tool_version = picard_options [ 'version' ] ) first_fastq = '' . join ( [ work_dir , '/' , base_name , '_1.fastq' ] ) assert os . path . exists ( first_fastq ) return first_fastq
2,895
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L376-L398
[ "def", "node_cmd", "(", "cmd_name", ",", "node_dict", ")", ":", "sc", "=", "{", "\"run\"", ":", "cmd_startstop", ",", "\"stop\"", ":", "cmd_startstop", ",", "\"connect\"", ":", "cmd_connect", ",", "\"details\"", ":", "cmd_details", "}", "node_num", "=", "node_selection", "(", "cmd_name", ",", "len", "(", "node_dict", ")", ")", "refresh_main", "=", "None", "if", "node_num", "!=", "0", ":", "(", "node_valid", ",", "node_info", ")", "=", "node_validate", "(", "node_dict", ",", "node_num", ",", "cmd_name", ")", "if", "node_valid", ":", "sub_cmd", "=", "sc", "[", "cmd_name", "]", "# get sub-command", "refresh_main", "=", "sub_cmd", "(", "node_dict", "[", "node_num", "]", ",", "cmd_name", ",", "node_info", ")", "else", ":", "# invalid target", "ui_print_suffix", "(", "node_info", ",", "C_ERR", ")", "sleep", "(", "1.5", ")", "else", ":", "# '0' entered - exit command but not program", "ui_print", "(", "\" - Exit Command\"", ")", "sleep", "(", "0.5", ")", "return", "refresh_main" ]
Write out a file to a given location . The location can be either a directory on the local machine or a folder with a bucket on AWS .
def export_results ( job , fsid , file_name , univ_options , subfolder = None ) : job . fileStore . logToMaster ( 'Exporting %s to output location' % fsid ) file_name = os . path . basename ( file_name ) try : assert univ_options [ 'output_folder' ] , 'Need a path to a folder to write out files' assert univ_options [ 'storage_location' ] , 'Need to know where the files need to go. ' + 'Local or AWS/Azure, etc.' except AssertionError as err : # This isn't a game killer. Continue the pipeline without erroring out but do inform the # user about it. print ( 'ERROR:' , err . message , file = sys . stderr ) return if univ_options [ 'output_folder' ] == 'NA' : output_folder = '' else : output_folder = univ_options [ 'output_folder' ] output_folder = os . path . join ( output_folder , univ_options [ 'patient' ] ) output_folder = os . path . join ( output_folder , subfolder ) if subfolder else output_folder if univ_options [ 'storage_location' ] == 'local' : # Handle Local try : # Create the directory if required os . makedirs ( output_folder , 0755 ) except OSError as err : if err . errno != errno . EEXIST : raise output_url = 'file://' + os . path . join ( output_folder , file_name ) elif univ_options [ 'storage_location' ] . startswith ( 'aws' ) : # Handle AWS bucket_name = univ_options [ 'storage_location' ] . split ( ':' ) [ - 1 ] output_url = os . path . join ( 'S3://' , bucket_name , output_folder . strip ( '/' ) , file_name ) # Can't do Azure or google yet. else : # TODO: Azure support print ( "Currently doesn't support anything but Local and aws." ) return job . fileStore . exportFile ( fsid , output_url )
2,896
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L401-L448
[ "def", "validate_dtype_freq", "(", "dtype", ",", "freq", ")", ":", "if", "freq", "is", "not", "None", ":", "freq", "=", "frequencies", ".", "to_offset", "(", "freq", ")", "if", "dtype", "is", "not", "None", ":", "dtype", "=", "pandas_dtype", "(", "dtype", ")", "if", "not", "is_period_dtype", "(", "dtype", ")", ":", "raise", "ValueError", "(", "'dtype must be PeriodDtype'", ")", "if", "freq", "is", "None", ":", "freq", "=", "dtype", ".", "freq", "elif", "freq", "!=", "dtype", ".", "freq", ":", "raise", "IncompatibleFrequency", "(", "'specified freq and dtype '", "'are different'", ")", "return", "freq" ]
Parse a chromosome string into a list .
def parse_chromosome_string ( job , chromosome_string ) : if chromosome_string is None : return [ ] else : assert isinstance ( chromosome_string , str ) chroms = [ c . strip ( ) for c in chromosome_string . split ( ',' ) ] if 'canonical' in chroms : assert 'canonical_chr' not in chroms , 'Cannot have canonical and canonical_chr' chr_prefix = False chroms . remove ( 'canonical' ) out_chroms = [ str ( c ) for c in range ( 1 , 23 ) ] + [ 'X' , 'Y' ] elif 'canonical_chr' in chroms : assert 'canonical' not in chroms , 'Cannot have canonical and canonical_chr' chr_prefix = True chroms . remove ( 'canonical_chr' ) out_chroms = [ 'chr' + str ( c ) for c in range ( 1 , 23 ) ] + [ 'chrX' , 'chrY' ] else : chr_prefix = None out_chroms = [ ] for chrom in chroms : if chr_prefix is not None and chrom . startswith ( 'chr' ) is not chr_prefix : job . fileStore . logToMaster ( 'chromosome %s does not match the rest that %s begin ' 'with "chr".' % ( chrom , 'all' if chr_prefix else 'don\'t' ) , level = logging . WARNING ) out_chroms . append ( chrom ) return chrom_sorted ( out_chroms )
2,897
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L521-L554
[ "def", "json_engine", "(", "self", ",", "req", ")", ":", "# pylint: disable=R0201,W0613", "try", ":", "return", "stats", ".", "engine_data", "(", "config", ".", "engine", ")", "except", "(", "error", ".", "LoggableError", ",", "xmlrpc", ".", "ERRORS", ")", "as", "torrent_exc", ":", "raise", "exc", ".", "HTTPInternalServerError", "(", "str", "(", "torrent_exc", ")", ")" ]
Send an email to the user when the run finishes .
def email_report ( job , univ_options ) : fromadd = "results@protect.cgl.genomics.ucsc.edu" msg = MIMEMultipart ( ) msg [ 'From' ] = fromadd if univ_options [ 'mail_to' ] is None : return else : msg [ 'To' ] = univ_options [ 'mail_to' ] msg [ 'Subject' ] = "Protect run for sample %s completed successfully." % univ_options [ 'patient' ] body = "Protect run for sample %s completed successfully." % univ_options [ 'patient' ] msg . attach ( MIMEText ( body , 'plain' ) ) text = msg . as_string ( ) try : server = smtplib . SMTP ( 'localhost' ) except socket . error as e : if e . errno == 111 : print ( 'No mail utils on this maachine' ) else : print ( 'Unexpected error while attempting to send an email.' ) print ( 'Could not send email report' ) except : print ( 'Could not send email report' ) else : server . sendmail ( fromadd , msg [ 'To' ] , text ) server . quit ( )
2,898
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/common.py#L619-L649
[ "def", "_check_rest_version", "(", "self", ",", "version", ")", ":", "version", "=", "str", "(", "version", ")", "if", "version", "not", "in", "self", ".", "supported_rest_versions", ":", "msg", "=", "\"Library is incompatible with REST API version {0}\"", "raise", "ValueError", "(", "msg", ".", "format", "(", "version", ")", ")", "array_rest_versions", "=", "self", ".", "_list_available_rest_versions", "(", ")", "if", "version", "not", "in", "array_rest_versions", ":", "msg", "=", "\"Array is incompatible with REST API version {0}\"", "raise", "ValueError", "(", "msg", ".", "format", "(", "version", ")", ")", "return", "LooseVersion", "(", "version", ")" ]
Return hashable structure from non - hashable structure using hippie means
def make_key_hippie ( obj , typed = True ) : ftype = type if typed else lambda o : None if is_hashable ( obj ) : ## DO NOT RETURN hash(obj), as hash collision would generate bad ## cache collisions. return obj , ftype ( obj ) ## should we try to convert to frozen{set,dict} to get the C ## hashing function speed ? But the convertion has a cost also. if isinstance ( obj , set ) : obj = sorted ( obj ) if isinstance ( obj , ( list , tuple ) ) : return tuple ( make_key_hippie ( e , typed ) for e in obj ) if isinstance ( obj , dict ) : return tuple ( sorted ( ( ( make_key_hippie ( k , typed ) , make_key_hippie ( v , typed ) ) for k , v in obj . items ( ) ) ) ) raise ValueError ( "%r can not be hashed. Try providing a custom key function." % obj )
2,899
https://github.com/0k/kids.cache/blob/668f3b966877c4a0855d60e05cc3706cf37e4570/src/kids/cache/__init__.py#L29-L54
[ "def", "is_train_dir", "(", "dir_", ":", "str", ")", "->", "bool", ":", "return", "path", ".", "exists", "(", "path", ".", "join", "(", "dir_", ",", "CXF_CONFIG_FILE", ")", ")", "and", "path", ".", "exists", "(", "path", ".", "join", "(", "dir_", ",", "CXF_TRACE_FILE", ")", ")", "and", "path", ".", "exists", "(", "path", ".", "join", "(", "dir_", ",", "CXF_LOG_FILE", ")", ")" ]