Columns: idx (int64, 0 to 251k), question (string, lengths 53 to 3.53k), target (string, lengths 5 to 1.23k), len_question (int64, 20 to 893), len_target (int64, 3 to 238)
250,600
def sas_logical_interconnects ( self ) : if not self . __sas_logical_interconnects : self . __sas_logical_interconnects = SasLogicalInterconnects ( self . __connection ) return self . __sas_logical_interconnects
Gets the SasLogicalInterconnects API client .
67
12
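
Every getter in this block follows the same lazy-initialization property pattern: build the resource client on first access, cache it in a private attribute, and return the cached instance afterwards. A minimal self-contained sketch of that pattern (ExampleClient and the stand-in Racks class below are hypothetical, not part of the SDK):

    class Racks:  # stand-in for a real resource client
        def __init__(self, connection):
            self.connection = connection

    class ExampleClient:
        def __init__(self, connection):
            self.__connection = connection
            self.__racks = None

        @property
        def racks(self):
            # built once on first access, reused afterwards
            if self.__racks is None:
                self.__racks = Racks(self.__connection)
            return self.__racks

    client = ExampleClient(connection=object())
    assert client.racks is client.racks  # same cached instance
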
250,601
def logical_downlinks ( self ) : if not self . __logical_downlinks : self . __logical_downlinks = LogicalDownlinks ( self . __connection ) return self . __logical_downlinks
Gets the LogicalDownlinks API client .
48
10
250,602
def power_devices ( self ) : if not self . __power_devices : self . __power_devices = PowerDevices ( self . __connection ) return self . __power_devices
Gets the PowerDevices API client .
40
9
250,603
def unmanaged_devices ( self ) : if not self . __unmanaged_devices : self . __unmanaged_devices = UnmanagedDevices ( self . __connection ) return self . __unmanaged_devices
Gets the Unmanaged Devices API client .
45
9
250,604
def racks ( self ) : if not self . __racks : self . __racks = Racks ( self . __connection ) return self . __racks
Gets the Racks API client .
34
8
250,605
def san_managers ( self ) : if not self . __san_managers : self . __san_managers = SanManagers ( self . __connection ) return self . __san_managers
Gets the SanManagers API client .
44
9
250,606
def endpoints ( self ) : if not self . __endpoints : self . __endpoints = Endpoints ( self . __connection ) return self . __endpoints
Gets the Endpoints API client .
35
8
250,607
def storage_systems ( self ) : if not self . __storage_systems : self . __storage_systems = StorageSystems ( self . __connection ) return self . __storage_systems
Gets the StorageSystems API client .
44
9
250,608
def storage_pools ( self ) : if not self . __storage_pools : self . __storage_pools = StoragePools ( self . __connection ) return self . __storage_pools
Gets the StoragePools API client .
44
9
250,609
def storage_volume_templates ( self ) : if not self . __storage_volume_templates : self . __storage_volume_templates = StorageVolumeTemplates ( self . __connection ) return self . __storage_volume_templates
Gets the StorageVolumeTemplates API client .
53
10
250,610
def storage_volume_attachments ( self ) : if not self . __storage_volume_attachments : self . __storage_volume_attachments = StorageVolumeAttachments ( self . __connection ) return self . __storage_volume_attachments
Gets the StorageVolumeAttachments API client .
53
10
250,611
def firmware_drivers ( self ) : if not self . __firmware_drivers : self . __firmware_drivers = FirmwareDrivers ( self . __connection ) return self . __firmware_drivers
Gets the FirmwareDrivers API client .
47
10
250,612
def firmware_bundles ( self ) : if not self . __firmware_bundles : self . __firmware_bundles = FirmwareBundles ( self . __connection ) return self . __firmware_bundles
Gets the FirmwareBundles API client .
56
11
250,613
def volumes ( self ) : if not self . __volumes : self . __volumes = Volumes ( self . __connection ) return self . __volumes
Gets the Volumes API client .
34
8
250,614
def sas_logical_jbod_attachments ( self ) : if not self . __sas_logical_jbod_attachments : self . __sas_logical_jbod_attachments = SasLogicalJbodAttachments ( self . __connection ) return self . __sas_logical_jbod_attachments
Gets the SAS Logical JBOD Attachments client .
81
14
250,615
def managed_sans ( self ) : if not self . __managed_sans : self . __managed_sans = ManagedSANs ( self . __connection ) return self . __managed_sans
Gets the Managed SANs API client .
45
10
250,616
def migratable_vc_domains ( self ) : if not self . __migratable_vc_domains : self . __migratable_vc_domains = MigratableVcDomains ( self . __connection ) return self . __migratable_vc_domains
Gets the VC Migration Manager API client .
63
9
250,617
def sas_interconnects ( self ) : if not self . __sas_interconnects : self . __sas_interconnects = SasInterconnects ( self . __connection ) return self . __sas_interconnects
Gets the SAS Interconnects API client .
53
10
250,618
def sas_logical_interconnect_groups ( self ) : if not self . __sas_logical_interconnect_groups : self . __sas_logical_interconnect_groups = SasLogicalInterconnectGroups ( self . __connection ) return self . __sas_logical_interconnect_groups
Gets the SasLogicalInterconnectGroups API client .
72
13
250,619
def drive_enclosures ( self ) : if not self . __drive_enclosures : self . __drive_enclosures = DriveEnclosures ( self . __connection ) return self . __drive_enclosures
Gets the Drive Enclosures API client .
49
10
250,620
def sas_logical_jbods ( self ) : if not self . __sas_logical_jbods : self . __sas_logical_jbods = SasLogicalJbods ( self . __connection ) return self . __sas_logical_jbods
Gets the SAS Logical JBODs API client .
67
13
250,621
def labels ( self ) : if not self . __labels : self . __labels = Labels ( self . __connection ) return self . __labels
Gets the Labels API client .
34
8
250,622
def index_resources ( self ) : if not self . __index_resources : self . __index_resources = IndexResources ( self . __connection ) return self . __index_resources
Gets the Index Resources API client .
39
8
250,623
def alerts ( self ) : if not self . __alerts : self . __alerts = Alerts ( self . __connection ) return self . __alerts
Gets the Alerts API client .
34
8
250,624
def events ( self ) : if not self . __events : self . __events = Events ( self . __connection ) return self . __events
Gets the Events API client .
30
7
250,625
def os_deployment_servers ( self ) : if not self . __os_deployment_servers : self . __os_deployment_servers = OsDeploymentServers ( self . __connection ) return self . __os_deployment_servers
Gets the Os Deployment Servers API client .
62
11
250,626
def certificate_rabbitmq ( self ) : if not self . __certificate_rabbitmq : self . __certificate_rabbitmq = CertificateRabbitMQ ( self . __connection ) return self . __certificate_rabbitmq
Gets the Certificate RabbitMQ API client .
56
9
250,627
def users ( self ) : if not self . __users : self . __users = Users ( self . __connection ) return self . __users
Gets the Users API client .
30
7
250,628
def appliance_device_read_community ( self ) : if not self . __appliance_device_read_community : self . __appliance_device_read_community = ApplianceDeviceReadCommunity ( self . __connection ) return self . __appliance_device_read_community
Gets the ApplianceDeviceReadCommunity API client .
64
11
250,629
def appliance_device_snmp_v1_trap_destinations ( self ) : if not self . __appliance_device_snmp_v1_trap_destinations : self . __appliance_device_snmp_v1_trap_destinations = ApplianceDeviceSNMPv1TrapDestinations ( self . __connection ) return self . __appliance_device_snmp_v1_trap_destinations
Gets the ApplianceDeviceSNMPv1TrapDestinations API client .
98
17
250,630
def appliance_device_snmp_v3_trap_destinations ( self ) : if not self . __appliance_device_snmp_v3_trap_destinations : self . __appliance_device_snmp_v3_trap_destinations = ApplianceDeviceSNMPv3TrapDestinations ( self . __connection ) return self . __appliance_device_snmp_v3_trap_destinations
Gets the ApplianceDeviceSNMPv3TrapDestinations API client .
98
17
250,631
def appliance_device_snmp_v3_users ( self ) : if not self . __appliance_device_snmp_v3_users : self . __appliance_device_snmp_v3_users = ApplianceDeviceSNMPv3Users ( self . __connection ) return self . __appliance_device_snmp_v3_users
Gets the ApplianceDeviceSNMPv3Users API client .
83
14
250,632
def appliance_node_information ( self ) : if not self . __appliance_node_information : self . __appliance_node_information = ApplianceNodeInformation ( self . __connection ) return self . __appliance_node_information
Gets the ApplianceNodeInformation API client .
55
10
250,633
def appliance_time_and_locale_configuration ( self ) : if not self . __appliance_time_and_locale_configuration : self . __appliance_time_and_locale_configuration = ApplianceTimeAndLocaleConfiguration ( self . __connection ) return self . __appliance_time_and_locale_configuration
Gets the ApplianceTimeAndLocaleConfiguration API client .
82
13
250,634
def versions ( self ) : if not self . __versions : self . __versions = Versions ( self . __connection ) return self . __versions
Gets the Version API client .
31
7
250,635
def backups ( self ) : if not self . __backups : self . __backups = Backups ( self . __connection ) return self . __backups
Gets the Backup API client .
34
7
250,636
def login_details ( self ) : if not self . __login_details : self . __login_details = LoginDetails ( self . __connection ) return self . __login_details
Gets the login details .
39
5
250,637
def get_available_networks ( self , * * kwargs ) : query_string = '&' . join ( '{}={}' . format ( key , value ) for key , value in kwargs . items ( ) if value ) uri = self . URI + "{}?{}" . format ( "/available-networks" , query_string ) return self . _helper . do_get ( uri )
Retrieves the list of Ethernet networks , Fibre Channel networks , and network sets that are available to a server profile template , along with their respective ports . The scopeUris , serverHardwareTypeUri , and enclosureGroupUri parameters should be specified to get the available networks for a new server profile template . The serverHardwareTypeUri , enclosureGroupUri , and profileTemplateUri should be specified to get available networks for an existing server profile template . The scopeUris parameter is ignored when the profileTemplateUri is specified .
95
104
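
A hedged sketch of the query-string construction above; the URI values are hypothetical, and falsy kwargs are dropped before joining:

    kwargs = {"serverHardwareTypeUri": "/rest/server-hardware-types/1",
              "enclosureGroupUri": "/rest/enclosure-groups/2",
              "scopeUris": None}  # falsy values are skipped
    query_string = '&'.join('{}={}'.format(k, v) for k, v in kwargs.items() if v)
    uri = "/rest/server-profile-templates" + "{}?{}".format("/available-networks", query_string)
    # '/rest/server-profile-templates/available-networks?serverHardwareTypeUri=...&enclosureGroupUri=...'
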
250,638
def get_all_without_ethernet ( self , start = 0 , count = - 1 , filter = '' , sort = '' ) : without_ethernet_client = ResourceClient ( self . _connection , "/rest/logical-downlinks/withoutEthernet" ) return without_ethernet_client . get_all ( start , count , filter = filter , sort = sort )
Gets a paginated collection of logical downlinks without ethernet . The collection is based on optional sorting and filtering and is constrained by start and count parameters .
87
32
250,639
def get_without_ethernet ( self , id_or_uri ) : uri = self . _client . build_uri ( id_or_uri ) + "/withoutEthernet" return self . _client . get ( uri )
Gets the logical downlink with the specified ID without ethernet .
54
14
250,640
def update_firmware ( self , firmware_information , force = False ) : firmware_uri = "{}/firmware" . format ( self . data [ "uri" ] ) result = self . _helper . update ( firmware_information , firmware_uri , force = force ) self . refresh ( ) return result
Installs firmware to the member interconnects of a SAS Logical Interconnect .
70
17
250,641
def get_firmware ( self ) : firmware_uri = "{}/firmware" . format ( self . data [ "uri" ] ) return self . _helper . do_get ( firmware_uri )
Gets baseline firmware information for a SAS Logical Interconnect .
48
13
250,642
def update_compliance_all ( self , information , timeout = - 1 ) : uri = self . URI + "/compliance" result = self . _helper . update ( information , uri , timeout = timeout ) return result
Returns SAS Logical Interconnects to a consistent state . The current SAS Logical Interconnect state is compared to the associated SAS Logical Interconnect group .
48
32
250,643
def replace_drive_enclosure ( self , information ) : uri = "{}/replaceDriveEnclosure" . format ( self . data [ "uri" ] ) result = self . _helper . create ( information , uri ) self . refresh ( ) return result
When a drive enclosure has been physically replaced , initiate the replacement operation that enables the new drive enclosure to take over as a replacement for the prior drive enclosure . The request requires the serial numbers of both the original drive enclosure and its replacement .
58
51
250,644
def update_configuration ( self ) : uri = "{}/configuration" . format ( self . data [ "uri" ] ) result = self . _helper . update ( { } , uri ) self . refresh ( ) return result
Asynchronously applies or re - applies the SAS Logical Interconnect configuration to all managed interconnects of a SAS Logical Interconnect .
53
29
250,645
def mutationhash ( strings , nedit ) : maxlen = max ( [ len ( string ) for string in strings ] ) indexes = generate_idx ( maxlen , nedit ) muthash = defaultdict ( set ) for string in strings : muthash [ string ] . update ( [ string ] ) for x in substitution_set ( string , indexes ) : muthash [ x ] . update ( [ string ] ) return muthash
produce a hash with each key a nedit - distance substitution for a set of strings . the value for each key is the set of strings the substitution could have come from
95
33
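
A short usage sketch, assuming the generate_idx, substitution_set, and mutate_string helpers this function relies on: every string within nedit substitutions of a barcode maps back to the barcode(s) it could have come from.

    muthash = mutationhash(["ACGT", "TTTT"], nedit=1)
    muthash["ACGA"]  # {'ACGT'}: one substitution away from ACGT
    muthash["ACGT"]  # {'ACGT'}: exact strings map to themselves
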
250,646
def substitution_set ( string , indexes ) : strlen = len ( string ) return { mutate_string ( string , x ) for x in indexes if valid_substitution ( strlen , x ) }
for a string return a set of all possible substitutions
45
11
250,647
def valid_substitution ( strlen , index ) : values = index [ 0 ] return all ( [ strlen > i for i in values ] )
skip performing substitutions that are outside the bounds of the string
33
12
250,648
def acgt_match ( string ) : search = re . compile ( r'[^ACGT]' ) . search return not bool ( search ( string ) )
returns True if the string consists of only A C G T
34
12
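
For illustration:

    acgt_match("ACGTT")  # True
    acgt_match("ACGNT")  # False: N is outside A, C, G, T
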
250,649
def stream_fastq ( file_handler ) : next_element = '' for i , line in enumerate ( file_handler ) : next_element += line if i % 4 == 3 : yield next_element next_element = ''
Generator which yields all four lines of a fastq read as one string
50
15
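
A hedged usage example of the generator, with an in-memory handle standing in for a FASTQ file:

    from io import StringIO

    fake = StringIO("@read1\nACGT\n+\nFFFF\n@read2\nTTTT\n+\nFFFF\n")
    for record in stream_fastq(fake):
        print(record.split("\n")[0])  # @read1, then @read2
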
250,650
def read_fastq ( filename ) : if not filename : return itertools . cycle ( ( None , ) ) if filename == "-" : filename_fh = sys . stdin elif filename . endswith ( 'gz' ) : if is_python3 : filename_fh = gzip . open ( filename , mode = 'rt' ) else : filename_fh = BufferedReader ( gzip . open ( filename , mode = 'rt' ) ) else : filename_fh = open ( filename ) return stream_fastq ( filename_fh )
return a stream of FASTQ entries handling gzipped and empty files
124
15
250,651
def write_fastq ( filename ) : if filename : if filename . endswith ( 'gz' ) : filename_fh = gzip . open ( filename , mode = 'wb' ) else : filename_fh = open ( filename , mode = 'w' ) else : filename_fh = None return filename_fh
return a handle for FASTQ writing handling gzipped files
72
13
250,652
def detect_alignment_annotations ( queryalignment , tags = False ) : annotations = set ( ) for k , v in BARCODEINFO . items ( ) : if tags : if queryalignment . has_tag ( v . bamtag ) : annotations . add ( k ) else : if v . readprefix in queryalignment . qname : annotations . add ( k ) return annotations
detects the annotations present in a SAM file by inspecting either the tags or the query names , and returns a set of the annotations present
85
25
250,653
def detect_fastq_annotations ( fastq_file ) : annotations = set ( ) queryread = tz . first ( read_fastq ( fastq_file ) ) for k , v in BARCODEINFO . items ( ) : if v . readprefix in queryread : annotations . add ( k ) return annotations
detects annotations present in a FASTQ file by examining the first read
70
18
250,654
def construct_transformed_regex ( annotations ) : re_string = '.*' if "cellular" in annotations : re_string += ":CELL_(?P<CB>.*)" if "molecular" in annotations : re_string += ":UMI_(?P<MB>\w*)" if "sample" in annotations : re_string += ":SAMPLE_(?P<SB>\w*)" if re_string == ".*" : logger . error ( "No annotation present on this file, aborting." ) sys . exit ( 1 ) return re_string
construct a regex that matches possible fields in a transformed file . annotations is a set of which keys in BARCODEINFO are present in the file
130
28
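
A sketch of the resulting regex in action for a file annotated with cellular and molecular barcodes (the read name is hypothetical):

    import re

    re_string = r'.*:CELL_(?P<CB>.*):UMI_(?P<MB>\w*)'  # output for {"cellular", "molecular"}
    m = re.search(re_string, '@read1:CELL_ACGTACGT:UMI_TTAACC')
    m.group('CB')  # 'ACGTACGT'
    m.group('MB')  # 'TTAACC'
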
250,655
def _infer_transform_options ( transform ) : TransformOptions = collections . namedtuple ( "TransformOptions" , [ 'CB' , 'dual_index' , 'triple_index' , 'MB' , 'SB' ] ) CB = False SB = False MB = False dual_index = False triple_index = False for rx in transform . values ( ) : if not rx : continue if "CB1" in rx : if "CB3" in rx : triple_index = True else : dual_index = True if "SB" in rx : SB = True if "CB" in rx : CB = True if "MB" in rx : MB = True return TransformOptions ( CB = CB , dual_index = dual_index , triple_index = triple_index , MB = MB , SB = SB )
figure out what transform options should be by examining the provided regexes for keywords
184
15
250,656
def _extract_readnum ( read_dict ) : pat = re . compile ( r"(?P<readnum>/\d+)$" ) parts = pat . split ( read_dict [ "name" ] ) if len ( parts ) == 3 : name , readnum , endofline = parts read_dict [ "name" ] = name read_dict [ "readnum" ] = readnum else : read_dict [ "readnum" ] = "" return read_dict
Extract read numbers from old - style fastqs .
106
11
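
An illustration of the old-style read-number split (the read name is hypothetical):

    rd = {"name": "read42/1"}
    _extract_readnum(rd)
    # rd is now {"name": "read42", "readnum": "/1"}
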
250,657
def sparse ( csv , sparse ) : import pandas as pd df = pd . read_csv ( csv , index_col = 0 , header = 0 ) pd . Series ( df . index ) . to_csv ( sparse + ".rownames" , index = False ) pd . Series ( df . columns . values ) . to_csv ( sparse + ".colnames" , index = False ) with open ( sparse , "w+b" ) as out_handle : scipy . io . mmwrite ( out_handle , scipy . sparse . csr_matrix ( df ) )
Convert a CSV file to a sparse matrix with rows and column names saved as companion files .
133
19
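
Usage sketch with hypothetical filenames; the matrix is written in MatrixMarket format with the row and column names saved alongside it:

    sparse("counts.csv", "counts.mtx")
    # writes counts.mtx, counts.mtx.rownames, counts.mtx.colnames
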
250,658
def cb_histogram ( fastq , umi_histogram ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) cb_counter = collections . Counter ( ) umi_counter = collections . Counter ( ) for read in read_fastq ( fastq ) : match = parser_re . search ( read ) . groupdict ( ) cb = match [ 'CB' ] cb_counter [ cb ] += 1 if umi_histogram : umi = match [ 'MB' ] umi_counter [ ( cb , umi ) ] += 1 for bc , count in cb_counter . most_common ( ) : sys . stdout . write ( '{}\t{}\n' . format ( bc , count ) ) if umi_histogram : with open ( umi_histogram , "w" ) as umi_handle : for cbumi , count in umi_counter . most_common ( ) : umi_handle . write ( '{}\t{}\t{}\n' . format ( cbumi [ 0 ] , cbumi [ 1 ] , count ) )
Counts the number of reads for each cellular barcode
274
11
250,659
def umi_histogram ( fastq ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) counter = collections . Counter ( ) for read in read_fastq ( fastq ) : match = parser_re . search ( read ) . groupdict ( ) counter [ match [ 'MB' ] ] += 1 for bc , count in counter . most_common ( ) : sys . stdout . write ( '{}\t{}\n' . format ( bc , count ) )
Counts the number of reads for each UMI
131
10
250,660
def get_cb_depth_set ( cb_histogram , cb_cutoff ) : cb_keep_set = set ( ) if not cb_histogram : return cb_keep_set with read_cbhistogram ( cb_histogram ) as fh : cb_map = dict ( p . strip ( ) . split ( ) for p in fh ) cb_keep_set = set ( [ k for k , v in cb_map . items ( ) if int ( v ) > cb_cutoff ] ) logger . info ( 'Keeping %d out of %d cellular barcodes.' % ( len ( cb_keep_set ) , len ( cb_map ) ) ) return cb_keep_set
Returns a set of barcodes with a minimum number of reads
167
12
250,661
def guess_depth_cutoff ( cb_histogram ) : with read_cbhistogram ( cb_histogram ) as fh : cb_vals = [ int ( p . strip ( ) . split ( ) [ 1 ] ) for p in fh ] histo = np . histogram ( np . log10 ( cb_vals ) , bins = 50 ) vals = histo [ 0 ] edges = histo [ 1 ] mids = np . array ( [ ( edges [ i ] + edges [ i + 1 ] ) / 2 for i in range ( edges . size - 1 ) ] ) wdensity = vals * ( 10 ** mids ) / sum ( vals * ( 10 ** mids ) ) baseline = np . median ( wdensity ) wdensity = list ( wdensity ) # find highest density in upper half of barcode distribution peak = wdensity . index ( max ( wdensity [ len ( wdensity ) // 2 : ] ) ) cutoff = None for index , dens in reversed ( list ( enumerate ( wdensity [ 1 : peak ] ) ) ) : if dens < 2 * baseline : cutoff = index break if not cutoff : return None else : cutoff = 10 ** mids [ cutoff ] logger . info ( 'Setting barcode cutoff to %d' % cutoff ) return cutoff
Guesses at an appropriate barcode cutoff
277
8
250,662
def cb_filter ( fastq , bc1 , bc2 , bc3 , cores , nedit ) : with open_gzipsafe ( bc1 ) as bc1_fh : bc1 = set ( cb . strip ( ) for cb in bc1_fh ) if bc2 : with open_gzipsafe ( bc2 ) as bc2_fh : bc2 = set ( cb . strip ( ) for cb in bc2_fh ) if bc3 : with open_gzipsafe ( bc3 ) as bc3_fh : bc3 = set ( cb . strip ( ) for cb in bc3_fh ) annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) if nedit == 0 : filter_cb = partial ( exact_barcode_filter , bc1 = bc1 , bc2 = bc2 , bc3 = bc3 , re_string = re_string ) else : bc1hash = MutationHash ( bc1 , nedit ) bc2hash = None bc3hash = None if bc2 : bc2hash = MutationHash ( bc2 , nedit ) if bc3 : bc3hash = MutationHash ( bc3 , nedit ) filter_cb = partial ( correcting_barcode_filter , bc1hash = bc1hash , bc2hash = bc2hash , bc3hash = bc3hash , re_string = re_string ) p = multiprocessing . Pool ( cores ) chunks = tz . partition_all ( 10000 , read_fastq ( fastq ) ) bigchunks = tz . partition_all ( cores , chunks ) for bigchunk in bigchunks : for chunk in p . map ( filter_cb , list ( bigchunk ) ) : for read in chunk : sys . stdout . write ( read )
Filters reads with non - matching barcodes . Expects formatted fastq files .
415
16
250,663
def sb_filter ( fastq , bc , cores , nedit ) : barcodes = set ( sb . strip ( ) for sb in bc ) if nedit == 0 : filter_sb = partial ( exact_sample_filter2 , barcodes = barcodes ) else : barcodehash = MutationHash ( barcodes , nedit ) filter_sb = partial ( correcting_sample_filter2 , barcodehash = barcodehash ) p = multiprocessing . Pool ( cores ) chunks = tz . partition_all ( 10000 , read_fastq ( fastq ) ) bigchunks = tz . partition_all ( cores , chunks ) for bigchunk in bigchunks : for chunk in p . map ( filter_sb , list ( bigchunk ) ) : for read in chunk : sys . stdout . write ( read )
Filters reads with non - matching sample barcodes . Expects formatted fastq files .
184
17
250,664
def mb_filter ( fastq , cores ) : filter_mb = partial ( umi_filter ) p = multiprocessing . Pool ( cores ) chunks = tz . partition_all ( 10000 , read_fastq ( fastq ) ) bigchunks = tz . partition_all ( cores , chunks ) for bigchunk in bigchunks : for chunk in p . map ( filter_mb , list ( bigchunk ) ) : for read in chunk : sys . stdout . write ( read )
Filters umis with non - ACGT bases . Expects formatted fastq files .
111
17
250,665
def kallisto ( fastq , out_dir , cb_histogram , cb_cutoff ) : parser_re = re . compile ( '(.*):CELL_(?P<CB>.*):UMI_(?P<UMI>.*)\\n(.*)\\n\\+\\n(.*)\\n' ) if fastq . endswith ( 'gz' ) : fastq_fh = gzip . GzipFile ( fileobj = open ( fastq ) ) elif fastq == "-" : fastq_fh = sys . stdin else : fastq_fh = open ( fastq ) cb_depth_set = get_cb_depth_set ( cb_histogram , cb_cutoff ) cb_set = set ( ) cb_batch = collections . defaultdict ( list ) parsed = 0 for read in stream_fastq ( fastq_fh ) : match = parser_re . search ( read ) . groupdict ( ) umi = match [ 'UMI' ] cb = match [ 'CB' ] if cb_depth_set and cb not in cb_depth_set : continue parsed += 1 cb_set . add ( cb ) cb_batch [ cb ] . append ( ( read , umi ) ) # write in batches to avoid opening up file handles repeatedly if not parsed % 10000000 : for cb , chunk in cb_batch . items ( ) : write_kallisto_chunk ( out_dir , cb , chunk ) cb_batch = collections . defaultdict ( list ) for cb , chunk in cb_batch . items ( ) : write_kallisto_chunk ( out_dir , cb , chunk ) with open ( os . path . join ( out_dir , "barcodes.batch" ) , "w" ) as out_handle : out_handle . write ( "#id umi-file file-1\n" ) batchformat = "{cb} {cb}.umi {cb}.fq\n" for cb in cb_set : out_handle . write ( batchformat . format ( * * locals ( ) ) )
Convert fastqtransformed file to output format compatible with kallisto .
484
17
250,666
def demultiplex_samples ( fastq , out_dir , nedit , barcodes ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) if barcodes : barcodes = set ( barcode . strip ( ) for barcode in barcodes ) else : barcodes = set ( ) if nedit == 0 : filter_bc = partial ( exact_sample_filter , barcodes = barcodes ) else : barcodehash = MutationHash ( barcodes , nedit ) filter_bc = partial ( correcting_sample_filter , barcodehash = barcodehash ) sample_set = set ( ) batch = collections . defaultdict ( list ) parsed = 0 safe_makedir ( out_dir ) for read in read_fastq ( fastq ) : parsed += 1 read = filter_bc ( read ) if not read : continue match = parser_re . search ( read ) . groupdict ( ) sample = match [ 'SB' ] sample_set . add ( sample ) batch [ sample ] . append ( read ) # write in batches to avoid opening up file handles repeatedly if not parsed % 10000000 : for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , sample + ".fq" ) with open ( out_file , "a" ) as out_handle : for read in reads : fixed = filter_bc ( read ) if fixed : out_handle . write ( fixed ) batch = collections . defaultdict ( list ) for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , sample + ".fq" ) with open ( out_file , "a" ) as out_handle : for read in reads : fixed = filter_bc ( read ) if fixed : out_handle . write ( fixed )
Demultiplex a fastqtransformed FASTQ file into a FASTQ file for each sample .
420
23
250,667
def demultiplex_cells ( fastq , out_dir , readnumber , prefix , cb_histogram , cb_cutoff ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) readstring = "" if not readnumber else "_R{}" . format ( readnumber ) filestring = "{prefix}{sample}{readstring}.fq" cb_set = set ( ) if cb_histogram : cb_set = get_cb_depth_set ( cb_histogram , cb_cutoff ) sample_set = set ( ) batch = collections . defaultdict ( list ) parsed = 0 safe_makedir ( out_dir ) for read in read_fastq ( fastq ) : parsed += 1 match = parser_re . search ( read ) . groupdict ( ) sample = match [ 'CB' ] if cb_set and sample not in cb_set : continue sample_set . add ( sample ) batch [ sample ] . append ( read ) # write in batches to avoid opening up file handles repeatedly if not parsed % 10000000 : for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , filestring . format ( * * locals ( ) ) ) with open ( out_file , "a" ) as out_handle : for read in reads : out_handle . write ( read ) batch = collections . defaultdict ( list ) for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , filestring . format ( * * locals ( ) ) ) with open ( out_file , "a" ) as out_handle : for read in reads : out_handle . write ( read )
Demultiplex a fastqtransformed FASTQ file into a FASTQ file for each cell .
406
23
250,668
def array_type ( data_types , field ) : from sqlalchemy . dialects import postgresql internal_type = field . base_field . get_internal_type ( ) # currently no support for multi-dimensional arrays if internal_type in data_types and internal_type != 'ArrayField' : sub_type = data_types [ internal_type ] ( field ) if not isinstance ( sub_type , ( list , tuple ) ) : sub_type = [ sub_type ] else : raise RuntimeError ( 'Unsupported array element type' ) return postgresql . ARRAY ( sub_type )
Allows conversion of Django ArrayField to SQLAlchemy Array . Takes care of mapping the type of the array element .
133
23
250,669
def set_verbose_logger_handlers ( ) : # noqa # type: (None) -> None global _REGISTERED_LOGGER_HANDLERS formatter = logging . Formatter ( '%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d ' '%(message)s' ) formatter . default_msec_format = '%s.%03d' for handler in _REGISTERED_LOGGER_HANDLERS : handler . setFormatter ( formatter )
Set logger handler formatters to a more detailed format
131
8
250,670
def download ( ctx ) : settings . add_cli_options ( ctx . cli_options , settings . TransferAction . Download ) ctx . initialize ( settings . TransferAction . Download ) specs = settings . create_download_specifications ( ctx . cli_options , ctx . config ) del ctx . cli_options for spec in specs : blobxfer . api . Downloader ( ctx . general_options , ctx . credentials , spec ) . start ( )
Download blobs or files from Azure Storage
106
8
250,671
def synccopy ( ctx ) : settings . add_cli_options ( ctx . cli_options , settings . TransferAction . Synccopy ) ctx . initialize ( settings . TransferAction . Synccopy ) specs = settings . create_synccopy_specifications ( ctx . cli_options , ctx . config ) del ctx . cli_options for spec in specs : blobxfer . api . SyncCopy ( ctx . general_options , ctx . credentials , spec ) . start ( )
Synchronously copy blobs or files between Azure Storage accounts
114
12
250,672
def upload ( ctx ) : settings . add_cli_options ( ctx . cli_options , settings . TransferAction . Upload ) ctx . initialize ( settings . TransferAction . Upload ) specs = settings . create_upload_specifications ( ctx . cli_options , ctx . config ) del ctx . cli_options for spec in specs : blobxfer . api . Uploader ( ctx . general_options , ctx . credentials , spec ) . start ( )
Upload files to Azure Storage
106
5
250,673
def get_idp_sso_supported_bindings ( idp_entity_id = None , config = None ) : if config is None : # avoid circular import from djangosaml2 . conf import get_config config = get_config ( ) # load metadata store from config meta = getattr ( config , 'metadata' , { } ) # if idp is None, assume only one exists so just use that if idp_entity_id is None : # .keys() returns dict_keys in python3.5+ try : idp_entity_id = list ( available_idps ( config ) . keys ( ) ) [ 0 ] except IndexError : raise ImproperlyConfigured ( "No IdP configured!" ) try : return meta . service ( idp_entity_id , 'idpsso_descriptor' , 'single_sign_on_service' ) . keys ( ) except UnknownSystemEntity : return [ ]
Returns the list of bindings supported by an IdP . This is not clear in the pysaml2 code , so it is wrapped in a utility function .
205
27
250,674
def fail_acs_response ( request , * args , * * kwargs ) : failure_function = import_string ( get_custom_setting ( 'SAML_ACS_FAILURE_RESPONSE_FUNCTION' , 'djangosaml2.acs_failures.template_failure' ) ) return failure_function ( request , * args , * * kwargs )
Serves as a common mechanism for ending ACS in case of any SAML related failure . Handling can be configured by setting the SAML_ACS_FAILURE_RESPONSE_FUNCTION as suitable for the project .
89
49
250,675
def echo_attributes ( request , config_loader_path = None , template = 'djangosaml2/echo_attributes.html' ) : state = StateCache ( request . session ) conf = get_config ( config_loader_path , request ) client = Saml2Client ( conf , state_cache = state , identity_cache = IdentityCache ( request . session ) ) subject_id = _get_subject_id ( request . session ) try : identity = client . users . get_identity ( subject_id , check_not_on_or_after = False ) except AttributeError : return HttpResponse ( "No active SAML identity found. Are you sure you have logged in via SAML?" ) return render ( request , template , { 'attributes' : identity [ 0 ] } )
Example view that echoes the SAML attributes of a user
177
11
250,676
def logout ( request , config_loader_path = None ) : state = StateCache ( request . session ) conf = get_config ( config_loader_path , request ) client = Saml2Client ( conf , state_cache = state , identity_cache = IdentityCache ( request . session ) ) subject_id = _get_subject_id ( request . session ) if subject_id is None : logger . warning ( 'The session does not contain the subject id for user %s' , request . user ) result = client . global_logout ( subject_id ) state . sync ( ) if not result : logger . error ( "Looks like the user %s is not logged in any IdP/AA" , subject_id ) return HttpResponseBadRequest ( "You are not logged in any IdP/AA" ) if len ( result ) > 1 : logger . error ( 'Sorry, I do not know how to logout from several sources. I will logout just from the first one' ) for entityid , logout_info in result . items ( ) : if isinstance ( logout_info , tuple ) : binding , http_info = logout_info if binding == BINDING_HTTP_POST : logger . debug ( 'Returning form to the IdP to continue the logout process' ) body = '' . join ( http_info [ 'data' ] ) return HttpResponse ( body ) elif binding == BINDING_HTTP_REDIRECT : logger . debug ( 'Redirecting to the IdP to continue the logout process' ) return HttpResponseRedirect ( get_location ( http_info ) ) else : logger . error ( 'Unknown binding: %s' , binding ) return HttpResponseServerError ( 'Failed to log out' ) else : # We must have had a soap logout return finish_logout ( request , logout_info ) logger . error ( 'Could not logout because there only the HTTP_REDIRECT is supported' ) return HttpResponseServerError ( 'Logout Binding not supported' )
SAML Logout Request initiator
448
7
250,677
def do_logout_service ( request , data , binding , config_loader_path = None , next_page = None , logout_error_template = 'djangosaml2/logout_error.html' ) : logger . debug ( 'Logout service started' ) conf = get_config ( config_loader_path , request ) state = StateCache ( request . session ) client = Saml2Client ( conf , state_cache = state , identity_cache = IdentityCache ( request . session ) ) if 'SAMLResponse' in data : # we started the logout logger . debug ( 'Receiving a logout response from the IdP' ) response = client . parse_logout_request_response ( data [ 'SAMLResponse' ] , binding ) state . sync ( ) return finish_logout ( request , response , next_page = next_page ) elif 'SAMLRequest' in data : # logout started by the IdP logger . debug ( 'Receiving a logout request from the IdP' ) subject_id = _get_subject_id ( request . session ) if subject_id is None : logger . warning ( 'The session does not contain the subject id for user %s. Performing local logout' , request . user ) auth . logout ( request ) return render ( request , logout_error_template , status = 403 ) else : http_info = client . handle_logout_request ( data [ 'SAMLRequest' ] , subject_id , binding , relay_state = data . get ( 'RelayState' , '' ) ) state . sync ( ) auth . logout ( request ) return HttpResponseRedirect ( get_location ( http_info ) ) else : logger . error ( 'No SAMLResponse or SAMLRequest parameter found' ) raise Http404 ( 'No SAMLResponse or SAMLRequest parameter found' )
SAML Logout Response endpoint
414
6
250,678
def metadata ( request , config_loader_path = None , valid_for = None ) : conf = get_config ( config_loader_path , request ) metadata = entity_descriptor ( conf ) return HttpResponse ( content = text_type ( metadata ) . encode ( 'utf-8' ) , content_type = "text/xml; charset=utf8" )
Returns an XML with the SAML 2 . 0 metadata for this SP as configured in the settings . py file .
83
23
250,679
def configure_user ( self , user , attributes , attribute_mapping ) : user . set_unusable_password ( ) return self . update_user ( user , attributes , attribute_mapping , force_save = True )
Configures a user after creation and returns the updated user .
49
12
250,680
def update_user ( self , user , attributes , attribute_mapping , force_save = False ) : if not attribute_mapping : return user user_modified = False for saml_attr , django_attrs in attribute_mapping . items ( ) : attr_value_list = attributes . get ( saml_attr ) if not attr_value_list : logger . debug ( 'Could not find value for "%s", not updating fields "%s"' , saml_attr , django_attrs ) continue for attr in django_attrs : if hasattr ( user , attr ) : user_attr = getattr ( user , attr ) if callable ( user_attr ) : modified = user_attr ( attr_value_list ) else : modified = self . _set_attribute ( user , attr , attr_value_list [ 0 ] ) user_modified = user_modified or modified else : logger . debug ( 'Could not find attribute "%s" on user "%s"' , attr , user ) logger . debug ( 'Sending the pre_save signal' ) signal_modified = any ( [ response for receiver , response in pre_user_save . send_robust ( sender = user . __class__ , instance = user , attributes = attributes , user_modified = user_modified ) ] ) if user_modified or signal_modified or force_save : user . save ( ) return user
Update a user with a set of attributes and returns the updated user .
313
14
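
For reference, attribute_mapping has the shape this loop expects, mapping each SAML attribute to one or more Django user attributes (the values here are hypothetical):

    attribute_mapping = {
        'uid': ('username',),
        'mail': ('email',),
        'cn': ('first_name',),
        'sn': ('last_name',),
    }
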
250,681
def _set_attribute ( self , obj , attr , value ) : field = obj . _meta . get_field ( attr ) if field . max_length is not None and len ( value ) > field . max_length : cleaned_value = value [ : field . max_length ] logger . warn ( 'The attribute "%s" was trimmed from "%s" to "%s"' , attr , value , cleaned_value ) else : cleaned_value = value old_value = getattr ( obj , attr ) if cleaned_value != old_value : setattr ( obj , attr , cleaned_value ) return True return False
Set an attribute of an object to a specific value .
138
11
250,682
def config_settings_loader ( request = None ) : conf = SPConfig ( ) conf . load ( copy . deepcopy ( settings . SAML_CONFIG ) ) return conf
Utility function to load the pysaml2 configuration .
38
12
250,683
def mkpath ( * segments , * * query ) : # Remove empty segments (e.g. no key specified) segments = [ bytes_to_str ( s ) for s in segments if s is not None ] # Join the segments into a path pathstring = '/' . join ( segments ) # Remove extra slashes pathstring = re . sub ( '/+' , '/' , pathstring ) # Add the query string if it exists _query = { } for key in query : if query [ key ] in [ False , True ] : _query [ key ] = str ( query [ key ] ) . lower ( ) elif query [ key ] is not None : if PY2 and isinstance ( query [ key ] , unicode ) : # noqa _query [ key ] = query [ key ] . encode ( 'utf-8' ) else : _query [ key ] = query [ key ] if len ( _query ) > 0 : pathstring += "?" + urlencode ( _query ) if not pathstring . startswith ( '/' ) : pathstring = '/' + pathstring return pathstring
Constructs the path & query portion of a URI from path segments and a dict .
239
17
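
Hedged examples of the resulting paths (segment names are hypothetical):

    mkpath("buckets", "mybucket", "keys")  # '/buckets/mybucket/keys'
    mkpath("buckets", None, "keys", r=2)   # '/buckets/keys?r=2' (None segment dropped)
    mkpath("search", "index", stale=True)  # '/search/index?stale=true' (booleans lowercased)
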
250,684
def search_index_path ( self , index = None , * * options ) : if not self . yz_wm_index : raise RiakError ( "Yokozuna search is unsupported by this Riak node" ) if index : index = quote_plus ( index ) return mkpath ( self . yz_wm_index , "index" , index , * * options )
Builds a Yokozuna search index URL .
81
10
250,685
def search_schema_path ( self , index , * * options ) : if not self . yz_wm_schema : raise RiakError ( "Yokozuna search is unsupported by this Riak node" ) return mkpath ( self . yz_wm_schema , "schema" , quote_plus ( index ) , * * options )
Builds a Yokozuna search Solr schema URL .
79
12
250,686
def to_op ( self ) : if not self . _adds : return None changes = { } if self . _adds : changes [ 'adds' ] = list ( self . _adds ) return changes
Extracts the modification operation from the Hll .
47
11
250,687
def add ( self , element ) : if not isinstance ( element , six . string_types ) : raise TypeError ( "Hll elements can only be strings" ) self . _adds . add ( element )
Adds an element to the HyperLogLog . Datatype cardinality will be updated when the object is saved .
46
23
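
A short sketch tying the two Hll methods together (datatype setup elided; element order in the op is not guaranteed because _adds is a set):

    hll.add("user-1")
    hll.add("user-2")
    hll.to_op()  # {'adds': ['user-1', 'user-2']} in some order
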
250,688
def ping ( self ) : status , _ , body = self . _request ( 'GET' , self . ping_path ( ) ) return ( status is not None ) and ( bytes_to_str ( body ) == 'OK' )
Check that the server is alive over HTTP
51
6
250,689
def stats ( self ) : status , _ , body = self . _request ( 'GET' , self . stats_path ( ) , { 'Accept' : 'application/json' } ) if status == 200 : return json . loads ( bytes_to_str ( body ) ) else : return None
Gets performance statistics and server information
64
7
250,690
def get_keys ( self , bucket , timeout = None ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . key_list_path ( bucket . name , bucket_type = bucket_type , timeout = timeout ) status , _ , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'keys' ] else : raise RiakError ( 'Error listing keys.' )
Fetch a list of keys for the bucket
117
9
250,691
def get_buckets ( self , bucket_type = None , timeout = None ) : bucket_type = self . _get_bucket_type ( bucket_type ) url = self . bucket_list_path ( bucket_type = bucket_type , timeout = timeout ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'buckets' ] else : raise RiakError ( 'Error getting buckets.' )
Fetch a list of all buckets
117
7
250,692
def get_bucket_props ( self , bucket ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . bucket_properties_path ( bucket . name , bucket_type = bucket_type ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'props' ] else : raise RiakError ( 'Error getting bucket properties.' )
Get properties for a bucket
115
5
250,693
def set_bucket_props ( self , bucket , props ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . bucket_properties_path ( bucket . name , bucket_type = bucket_type ) headers = { 'Content-Type' : 'application/json' } content = json . dumps ( { 'props' : props } ) # Run the request... status , _ , body = self . _request ( 'PUT' , url , headers , content ) if status == 401 : raise SecurityError ( 'Not authorized to set bucket properties.' ) elif status != 204 : raise RiakError ( 'Error setting bucket properties.' ) return True
Set the properties on the bucket object given
152
8
250,694
def clear_bucket_props ( self , bucket ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . bucket_properties_path ( bucket . name , bucket_type = bucket_type ) headers = { 'Content-Type' : 'application/json' } # Run the request... status , _ , _ = self . _request ( 'DELETE' , url , headers , None ) if status == 204 : return True elif status == 405 : return False else : raise RiakError ( 'Error %s clearing bucket properties.' % status )
Reset the properties on the given bucket object
146
8
250,695
def get_bucket_type_props ( self , bucket_type ) : self . _check_bucket_types ( bucket_type ) url = self . bucket_type_properties_path ( bucket_type . name ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'props' ] else : raise RiakError ( 'Error getting bucket-type properties.' )
Get properties for a bucket - type
111
7
250,696
def set_bucket_type_props ( self , bucket_type , props ) : self . _check_bucket_types ( bucket_type ) url = self . bucket_type_properties_path ( bucket_type . name ) headers = { 'Content-Type' : 'application/json' } content = json . dumps ( { 'props' : props } ) # Run the request... status , _ , _ = self . _request ( 'PUT' , url , headers , content ) if status != 204 : raise RiakError ( 'Error setting bucket-type properties.' ) return True
Set the properties on the bucket - type
129
8
250,697
def mapred ( self , inputs , query , timeout = None ) : # Construct the job, optionally set the timeout... content = self . _construct_mapred_json ( inputs , query , timeout ) # Do the request... url = self . mapred_path ( ) headers = { 'Content-Type' : 'application/json' } status , headers , body = self . _request ( 'POST' , url , headers , content ) # Make sure the expected status code came back... if status != 200 : raise RiakError ( 'Error running MapReduce operation. Headers: %s Body: %s' % ( repr ( headers ) , repr ( body ) ) ) result = json . loads ( bytes_to_str ( body ) ) return result
Run a MapReduce query .
162
7
250,698
def create_search_index ( self , index , schema = None , n_val = None , timeout = None ) : if not self . yz_wm_index : raise NotImplementedError ( "Search 2.0 administration is not " "supported for this version" ) url = self . search_index_path ( index ) headers = { 'Content-Type' : 'application/json' } content_dict = dict ( ) if schema : content_dict [ 'schema' ] = schema if n_val : content_dict [ 'n_val' ] = n_val if timeout : content_dict [ 'timeout' ] = timeout content = json . dumps ( content_dict ) # Run the request... status , _ , _ = self . _request ( 'PUT' , url , headers , content ) if status != 204 : raise RiakError ( 'Error setting Search 2.0 index.' ) return True
Create a Solr search index for Yokozuna .
198
11
250,699
def list_search_indexes ( self ) : if not self . yz_wm_index : raise NotImplementedError ( "Search 2.0 administration is not " "supported for this version" ) url = self . search_index_path ( ) # Run the request... status , headers , body = self . _request ( 'GET' , url ) if status == 200 : json_data = json . loads ( bytes_to_str ( body ) ) # Return a list of dictionaries return json_data else : raise RiakError ( 'Error getting Search 2.0 index.' )
Return a list of Solr search indexes from Yokozuna .
128
13