query
stringlengths 5
1.23k
| positive
stringlengths 53
15.2k
| id_
int64 0
252k
| task_name
stringlengths 87
242
| negative
listlengths 20
553
|
|---|---|---|---|---|
Returns the decorator and the undecorated function of given object .
|
def undecorate ( func ) : orig_call_wrapper = lambda x : x for call_wrapper , unwrap in SUPPORTED_DECORATOR . items ( ) : if isinstance ( func , call_wrapper ) : func = unwrap ( func ) orig_call_wrapper = call_wrapper break return orig_call_wrapper , func
| 2,900
|
https://github.com/0k/kids.cache/blob/668f3b966877c4a0855d60e05cc3706cf37e4570/src/kids/cache/__init__.py#L82-L90
|
[
"def",
"volumes_delete",
"(",
"storage_pool",
",",
"logger",
")",
":",
"try",
":",
"for",
"vol_name",
"in",
"storage_pool",
".",
"listVolumes",
"(",
")",
":",
"try",
":",
"vol",
"=",
"storage_pool",
".",
"storageVolLookupByName",
"(",
"vol_name",
")",
"vol",
".",
"delete",
"(",
"0",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volume %s.\"",
",",
"vol_name",
")",
"except",
"libvirt",
".",
"libvirtError",
":",
"logger",
".",
"exception",
"(",
"\"Unable to delete storage volumes.\"",
")"
] |
Market - related commands .
|
def item ( ctx , appid , title ) : ctx . obj [ 'appid' ] = appid ctx . obj [ 'title' ] = title
| 2,901
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/cli.py#L111-L114
|
[
"def",
"handlePortfolio",
"(",
"self",
",",
"msg",
")",
":",
"# log handler msg",
"self",
".",
"log_msg",
"(",
"\"portfolio\"",
",",
"msg",
")",
"# contract identifier",
"contract_tuple",
"=",
"self",
".",
"contract_to_tuple",
"(",
"msg",
".",
"contract",
")",
"contractString",
"=",
"self",
".",
"contractString",
"(",
"contract_tuple",
")",
"# try creating the contract",
"self",
".",
"registerContract",
"(",
"msg",
".",
"contract",
")",
"# new account?",
"if",
"msg",
".",
"accountName",
"not",
"in",
"self",
".",
"_portfolios",
".",
"keys",
"(",
")",
":",
"self",
".",
"_portfolios",
"[",
"msg",
".",
"accountName",
"]",
"=",
"{",
"}",
"self",
".",
"_portfolios",
"[",
"msg",
".",
"accountName",
"]",
"[",
"contractString",
"]",
"=",
"{",
"\"symbol\"",
":",
"contractString",
",",
"\"position\"",
":",
"int",
"(",
"msg",
".",
"position",
")",
",",
"\"marketPrice\"",
":",
"float",
"(",
"msg",
".",
"marketPrice",
")",
",",
"\"marketValue\"",
":",
"float",
"(",
"msg",
".",
"marketValue",
")",
",",
"\"averageCost\"",
":",
"float",
"(",
"msg",
".",
"averageCost",
")",
",",
"\"unrealizedPNL\"",
":",
"float",
"(",
"msg",
".",
"unrealizedPNL",
")",
",",
"\"realizedPNL\"",
":",
"float",
"(",
"msg",
".",
"realizedPNL",
")",
",",
"\"totalPNL\"",
":",
"float",
"(",
"msg",
".",
"realizedPNL",
")",
"+",
"float",
"(",
"msg",
".",
"unrealizedPNL",
")",
",",
"\"account\"",
":",
"msg",
".",
"accountName",
"}",
"# fire callback",
"self",
".",
"ibCallback",
"(",
"caller",
"=",
"\"handlePortfolio\"",
",",
"msg",
"=",
"msg",
")"
] |
Prints out market item price .
|
def get_price ( ctx , currency ) : appid = ctx . obj [ 'appid' ] title = ctx . obj [ 'title' ] item_ = Item ( appid , title ) item_ . get_price_data ( currency ) click . secho ( 'Lowest price: %s %s' % ( item_ . price_lowest , item_ . price_currency ) , fg = 'green' )
| 2,902
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/cli.py#L120-L128
|
[
"def",
"block_diag",
"(",
"*",
"blocks",
":",
"np",
".",
"ndarray",
")",
"->",
"np",
".",
"ndarray",
":",
"for",
"b",
"in",
"blocks",
":",
"if",
"b",
".",
"shape",
"[",
"0",
"]",
"!=",
"b",
".",
"shape",
"[",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"'Blocks must be square.'",
")",
"if",
"not",
"blocks",
":",
"return",
"np",
".",
"zeros",
"(",
"(",
"0",
",",
"0",
")",
",",
"dtype",
"=",
"np",
".",
"complex128",
")",
"n",
"=",
"sum",
"(",
"b",
".",
"shape",
"[",
"0",
"]",
"for",
"b",
"in",
"blocks",
")",
"dtype",
"=",
"functools",
".",
"reduce",
"(",
"_merge_dtypes",
",",
"(",
"b",
".",
"dtype",
"for",
"b",
"in",
"blocks",
")",
")",
"result",
"=",
"np",
".",
"zeros",
"(",
"shape",
"=",
"(",
"n",
",",
"n",
")",
",",
"dtype",
"=",
"dtype",
")",
"i",
"=",
"0",
"for",
"b",
"in",
"blocks",
":",
"j",
"=",
"i",
"+",
"b",
".",
"shape",
"[",
"0",
"]",
"result",
"[",
"i",
":",
"j",
",",
"i",
":",
"j",
"]",
"=",
"b",
"i",
"=",
"j",
"return",
"result"
] |
Prints out cards available for application .
|
def get_cards ( ctx ) : appid = ctx . obj [ 'appid' ] app = Application ( appid ) click . secho ( 'Cards for `%s` [appid: %s]' % ( app . title , appid ) , fg = 'green' ) if not app . has_cards : click . secho ( 'This app has no cards.' , fg = 'red' , err = True ) return cards , booster = app . get_cards ( ) def get_line ( card ) : return '%s [market hash: `%s`]' % ( card . title , card . market_hash ) for card in cards . values ( ) : click . echo ( get_line ( card ) ) if booster : click . secho ( '* Booster pack: `%s`' % get_line ( booster ) , fg = 'yellow' ) click . secho ( '* Total cards: %d' % len ( cards ) , fg = 'green' )
| 2,903
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/cli.py#L141-L164
|
[
"def",
"send",
"(",
"self",
",",
"event",
")",
":",
"try",
":",
"self",
".",
"collection",
".",
"insert",
"(",
"event",
",",
"manipulate",
"=",
"False",
")",
"except",
"(",
"PyMongoError",
",",
"BSONError",
")",
":",
"# The event will be lost in case of a connection error or any error",
"# that occurs when trying to insert the event into Mongo.",
"# pymongo will re-connect/re-authenticate automatically",
"# during the next event.",
"msg",
"=",
"'Error inserting to MongoDB event tracker backend'",
"log",
".",
"exception",
"(",
"msg",
")"
] |
Prints out lowest card prices for an application . Comma - separated list of application IDs is supported .
|
def get_card_prices ( ctx , currency ) : appid = ctx . obj [ 'appid' ] detailed = True appids = [ appid ] if ',' in appid : appids = [ appid . strip ( ) for appid in appid . split ( ',' ) ] detailed = False for appid in appids : print_card_prices ( appid , currency , detailed = detailed ) click . echo ( '' )
| 2,904
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/cli.py#L170-L186
|
[
"def",
"wait_until_page_does_not_contain_these_elements",
"(",
"self",
",",
"timeout",
",",
"*",
"locators",
")",
":",
"self",
".",
"_wait_until_no_error",
"(",
"timeout",
",",
"self",
".",
"_wait_for_elements_to_go_away",
",",
"locators",
")"
] |
Prints out total gems count for a Steam user .
|
def get_gems ( ctx ) : username = ctx . obj [ 'username' ] click . secho ( 'Total gems owned by `%s`: %d' % ( username , User ( username ) . gems_total ) , fg = 'green' )
| 2,905
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/cli.py#L199-L205
|
[
"def",
"get_attached_container_create_kwargs",
"(",
"self",
",",
"action",
",",
"container_name",
",",
"kwargs",
"=",
"None",
")",
":",
"client_config",
"=",
"action",
".",
"client_config",
"policy",
"=",
"self",
".",
"_policy",
"config_id",
"=",
"action",
".",
"config_id",
"path",
"=",
"resolve_value",
"(",
"policy",
".",
"default_volume_paths",
"[",
"config_id",
".",
"map_name",
"]",
"[",
"config_id",
".",
"instance_name",
"]",
")",
"user",
"=",
"extract_user",
"(",
"action",
".",
"config",
".",
"user",
")",
"c_kwargs",
"=",
"dict",
"(",
"name",
"=",
"container_name",
",",
"image",
"=",
"self",
".",
"_policy",
".",
"base_image",
",",
"volumes",
"=",
"[",
"path",
"]",
",",
"user",
"=",
"user",
",",
"network_disabled",
"=",
"True",
",",
")",
"hc_extra_kwargs",
"=",
"kwargs",
".",
"pop",
"(",
"'host_config'",
",",
"None",
")",
"if",
"kwargs",
"else",
"None",
"use_host_config",
"=",
"client_config",
".",
"features",
"[",
"'host_config'",
"]",
"if",
"use_host_config",
":",
"hc_kwargs",
"=",
"self",
".",
"get_attached_container_host_config_kwargs",
"(",
"action",
",",
"None",
",",
"kwargs",
"=",
"hc_extra_kwargs",
")",
"if",
"hc_kwargs",
":",
"if",
"use_host_config",
"==",
"USE_HC_MERGE",
":",
"c_kwargs",
".",
"update",
"(",
"hc_kwargs",
")",
"else",
":",
"c_kwargs",
"[",
"'host_config'",
"]",
"=",
"HostConfig",
"(",
"version",
"=",
"client_config",
".",
"version",
",",
"*",
"*",
"hc_kwargs",
")",
"update_kwargs",
"(",
"c_kwargs",
",",
"kwargs",
")",
"return",
"c_kwargs"
] |
Prints out games owned by a Steam user .
|
def get_games ( ctx ) : username = ctx . obj [ 'username' ] games = User ( username ) . get_games_owned ( ) for game in sorted ( games . values ( ) , key = itemgetter ( 'title' ) ) : click . echo ( '%s [appid: %s]' % ( game [ 'title' ] , game [ 'appid' ] ) ) click . secho ( 'Total gems owned by `%s`: %d' % ( username , len ( games ) ) , fg = 'green' )
| 2,906
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/cli.py#L210-L219
|
[
"def",
"_cutadapt_trim",
"(",
"fastq_files",
",",
"quality_format",
",",
"adapters",
",",
"out_files",
",",
"log_file",
",",
"data",
")",
":",
"if",
"all",
"(",
"[",
"utils",
".",
"file_exists",
"(",
"x",
")",
"for",
"x",
"in",
"out_files",
"]",
")",
":",
"return",
"out_files",
"cmd",
"=",
"_cutadapt_trim_cmd",
"(",
"fastq_files",
",",
"quality_format",
",",
"adapters",
",",
"out_files",
",",
"data",
")",
"if",
"len",
"(",
"fastq_files",
")",
"==",
"1",
":",
"of",
"=",
"[",
"out_files",
"[",
"0",
"]",
",",
"log_file",
"]",
"message",
"=",
"\"Trimming %s in single end mode with cutadapt.\"",
"%",
"(",
"fastq_files",
"[",
"0",
"]",
")",
"with",
"file_transaction",
"(",
"data",
",",
"of",
")",
"as",
"of_tx",
":",
"of1_tx",
",",
"log_tx",
"=",
"of_tx",
"do",
".",
"run",
"(",
"cmd",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
")",
",",
"message",
")",
"else",
":",
"of",
"=",
"out_files",
"+",
"[",
"log_file",
"]",
"with",
"file_transaction",
"(",
"data",
",",
"of",
")",
"as",
"tx_out_files",
":",
"of1_tx",
",",
"of2_tx",
",",
"log_tx",
"=",
"tx_out_files",
"tmp_fq1",
"=",
"utils",
".",
"append_stem",
"(",
"of1_tx",
",",
"\".tmp\"",
")",
"tmp_fq2",
"=",
"utils",
".",
"append_stem",
"(",
"of2_tx",
",",
"\".tmp\"",
")",
"singles_file",
"=",
"of1_tx",
"+",
"\".single\"",
"message",
"=",
"\"Trimming %s and %s in paired end mode with cutadapt.\"",
"%",
"(",
"fastq_files",
"[",
"0",
"]",
",",
"fastq_files",
"[",
"1",
"]",
")",
"do",
".",
"run",
"(",
"cmd",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
")",
",",
"message",
")",
"return",
"out_files"
] |
Prints out price stats for booster packs available in Steam user inventory .
|
def get_booster_stats ( ctx , currency ) : username = ctx . obj [ 'username' ] inventory = User ( username ) . _get_inventory_raw ( ) boosters = { } for item in inventory [ 'rgDescriptions' ] . values ( ) : is_booster = False tags = item [ 'tags' ] for tag in tags : if tag [ 'internal_name' ] == TAG_ITEM_CLASS_BOOSTER : is_booster = True break if not is_booster : continue appid = item [ 'market_fee_app' ] title = item [ 'name' ] boosters [ appid ] = title if not boosters : click . secho ( 'User `%s` has no booster packs' % username , fg = 'red' , err = True ) return for appid , title in boosters . items ( ) : click . secho ( 'Found booster: `%s`' % title , fg = 'blue' ) print_card_prices ( appid , currency )
| 2,907
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/cli.py#L225-L255
|
[
"def",
"concatenate_not_none",
"(",
"l",
",",
"axis",
"=",
"0",
")",
":",
"# Get the indexes of the arrays in the list",
"mask",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"l",
")",
")",
":",
"if",
"l",
"[",
"i",
"]",
"is",
"not",
"None",
":",
"mask",
".",
"append",
"(",
"i",
")",
"# Concatenate them",
"l_stacked",
"=",
"np",
".",
"concatenate",
"(",
"[",
"l",
"[",
"i",
"]",
"for",
"i",
"in",
"mask",
"]",
",",
"axis",
"=",
"axis",
")",
"return",
"l_stacked"
] |
Prints out price stats for cards available in Steam user inventory .
|
def get_cards_stats ( ctx , currency , skip_owned , appid , foil ) : username = ctx . obj [ 'username' ] cards_by_app = defaultdict ( list ) inventory = User ( username ) . traverse_inventory ( item_filter = TAG_ITEM_CLASS_CARD ) for item in inventory : appid_ = item . app . appid if not appid or appid_ in appid : cards_by_app [ appid_ ] . append ( item ) if not cards_by_app : click . secho ( 'User `%s` has no cards' % username , fg = 'red' , err = True ) return for appid_ , cards in cards_by_app . items ( ) : app = cards [ 0 ] . app print_card_prices ( app . appid , currency , owned_cards = [ card . title for card in cards ] , skip_owned = skip_owned , foil = foil , )
| 2,908
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/cli.py#L264-L287
|
[
"def",
"save_to_file",
"(",
"self",
",",
"filename",
",",
"remap_dim0",
"=",
"None",
",",
"remap_dim1",
"=",
"None",
")",
":",
"# rows - first index",
"# columns - second index",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"fobj",
":",
"columns",
"=",
"list",
"(",
"sorted",
"(",
"self",
".",
"_dim1",
")",
")",
"for",
"col",
"in",
"columns",
":",
"fobj",
".",
"write",
"(",
"','",
")",
"fobj",
".",
"write",
"(",
"str",
"(",
"remap_dim1",
"[",
"col",
"]",
"if",
"remap_dim1",
"else",
"col",
")",
")",
"fobj",
".",
"write",
"(",
"'\\n'",
")",
"for",
"row",
"in",
"sorted",
"(",
"self",
".",
"_dim0",
")",
":",
"fobj",
".",
"write",
"(",
"str",
"(",
"remap_dim0",
"[",
"row",
"]",
"if",
"remap_dim0",
"else",
"row",
")",
")",
"for",
"col",
"in",
"columns",
":",
"fobj",
".",
"write",
"(",
"','",
")",
"fobj",
".",
"write",
"(",
"str",
"(",
"self",
"[",
"row",
",",
"col",
"]",
")",
")",
"fobj",
".",
"write",
"(",
"'\\n'",
")"
] |
A wrapper for the the entire RADIA sub - graph .
|
def run_radia_with_merge ( job , rna_bam , tumor_bam , normal_bam , univ_options , radia_options ) : spawn = job . wrapJobFn ( run_radia , rna_bam [ 'rna_genome' ] , tumor_bam , normal_bam , univ_options , radia_options , disk = '100M' , memory = '100M' ) . encapsulate ( ) merge = job . wrapJobFn ( merge_perchrom_vcfs , spawn . rv ( ) , univ_options , disk = '100M' , memory = '100M' ) job . addChild ( spawn ) spawn . addChild ( merge ) return merge . rv ( )
| 2,909
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/radia.py#L40-L59
|
[
"def",
"add_item",
"(",
"self",
",",
"item_url",
",",
"item_metadata",
")",
":",
"c",
"=",
"self",
".",
"conn",
".",
"cursor",
"(",
")",
"c",
".",
"execute",
"(",
"\"DELETE FROM items WHERE url=?\"",
",",
"(",
"str",
"(",
"item_url",
")",
",",
")",
")",
"self",
".",
"conn",
".",
"commit",
"(",
")",
"c",
".",
"execute",
"(",
"\"INSERT INTO items VALUES (?, ?, ?)\"",
",",
"(",
"str",
"(",
"item_url",
")",
",",
"item_metadata",
",",
"self",
".",
"__now_iso_8601",
"(",
")",
")",
")",
"self",
".",
"conn",
".",
"commit",
"(",
")",
"c",
".",
"close",
"(",
")"
] |
Spawn a RADIA job for each chromosome on the input bam trios .
|
def run_radia ( job , rna_bam , tumor_bam , normal_bam , univ_options , radia_options ) : if 'rna_genome' in rna_bam . keys ( ) : rna_bam = rna_bam [ 'rna_genome' ] elif set ( rna_bam . keys ( ) ) == { 'rna_genome_sorted.bam' , 'rna_genome_sorted.bam.bai' } : pass else : raise RuntimeError ( 'An improperly formatted dict was passed to rna_bam.' ) bams = { 'tumor_rna' : rna_bam [ 'rna_genome_sorted.bam' ] , 'tumor_rnai' : rna_bam [ 'rna_genome_sorted.bam.bai' ] , 'tumor_dna' : tumor_bam [ 'tumor_dna_fix_pg_sorted.bam' ] , 'tumor_dnai' : tumor_bam [ 'tumor_dna_fix_pg_sorted.bam.bai' ] , 'normal_dna' : normal_bam [ 'normal_dna_fix_pg_sorted.bam' ] , 'normal_dnai' : normal_bam [ 'normal_dna_fix_pg_sorted.bam.bai' ] } # Get a list of chromosomes to process if radia_options [ 'chromosomes' ] : chromosomes = radia_options [ 'chromosomes' ] else : chromosomes = sample_chromosomes ( job , radia_options [ 'genome_fai' ] ) perchrom_radia = defaultdict ( ) for chrom in chromosomes : radia = job . addChildJobFn ( run_radia_perchrom , bams , univ_options , radia_options , chrom , memory = '6G' , disk = PromisedRequirement ( radia_disk , tumor_bam [ 'tumor_dna_fix_pg_sorted.bam' ] , normal_bam [ 'normal_dna_fix_pg_sorted.bam' ] , rna_bam [ 'rna_genome_sorted.bam' ] , radia_options [ 'genome_fasta' ] ) ) filter_radia = radia . addChildJobFn ( run_filter_radia , bams , radia . rv ( ) , univ_options , radia_options , chrom , memory = '6G' , disk = PromisedRequirement ( radia_disk , tumor_bam [ 'tumor_dna_fix_pg_sorted.bam' ] , normal_bam [ 'normal_dna_fix_pg_sorted.bam' ] , rna_bam [ 'rna_genome_sorted.bam' ] , radia_options [ 'genome_fasta' ] ) ) perchrom_radia [ chrom ] = filter_radia . rv ( ) job . fileStore . logToMaster ( 'Ran spawn_radia on %s successfully' % univ_options [ 'patient' ] ) return perchrom_radia
| 2,910
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/radia.py#L62-L126
|
[
"def",
"set_variable",
"(",
"self",
",",
"name",
",",
"value",
")",
":",
"name_arg",
"=",
"\"\\\\\"",
"+",
"name",
"variable_exists",
"=",
"False",
"for",
"variable",
"in",
"self",
".",
"variables",
":",
"if",
"name_arg",
"==",
"variable",
".",
"arguments",
".",
"_positional_args",
"[",
"0",
"]",
":",
"variable_exists",
"=",
"True",
"break",
"if",
"variable_exists",
":",
"renew",
"=",
"Command",
"(",
"command",
"=",
"\"renewcommand\"",
",",
"arguments",
"=",
"[",
"NoEscape",
"(",
"name_arg",
")",
",",
"value",
"]",
")",
"self",
".",
"append",
"(",
"renew",
")",
"else",
":",
"new",
"=",
"Command",
"(",
"command",
"=",
"\"newcommand\"",
",",
"arguments",
"=",
"[",
"NoEscape",
"(",
"name_arg",
")",
",",
"value",
"]",
")",
"self",
".",
"variables",
".",
"append",
"(",
"new",
")"
] |
Run RADIA call on a single chromosome in the input bams .
|
def run_radia_perchrom ( job , bams , univ_options , radia_options , chrom ) : work_dir = os . getcwd ( ) input_files = { 'rna.bam' : bams [ 'tumor_rna' ] , 'rna.bam.bai' : bams [ 'tumor_rnai' ] , 'tumor.bam' : bams [ 'tumor_dna' ] , 'tumor.bam.bai' : bams [ 'tumor_dnai' ] , 'normal.bam' : bams [ 'normal_dna' ] , 'normal.bam.bai' : bams [ 'normal_dnai' ] , 'genome.fa.tar.gz' : radia_options [ 'genome_fasta' ] , 'genome.fa.fai.tar.gz' : radia_options [ 'genome_fai' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) for key in ( 'genome.fa' , 'genome.fa.fai' ) : input_files [ key ] = untargz ( input_files [ key + '.tar.gz' ] , work_dir ) input_files = { key : docker_path ( path ) for key , path in input_files . items ( ) } radia_output = '' . join ( [ work_dir , '/radia_' , chrom , '.vcf' ] ) radia_log = '' . join ( [ work_dir , '/radia_' , chrom , '_radia.log' ] ) parameters = [ univ_options [ 'patient' ] , # shortID chrom , '-n' , input_files [ 'normal.bam' ] , '-t' , input_files [ 'tumor.bam' ] , '-r' , input_files [ 'rna.bam' ] , '' . join ( [ '--rnaTumorFasta=' , input_files [ 'genome.fa' ] ] ) , '-f' , input_files [ 'genome.fa' ] , '-o' , docker_path ( radia_output ) , '-i' , univ_options [ 'ref' ] , '-m' , input_files [ 'genome.fa' ] , '-d' , 'aarjunrao@soe.ucsc.edu' , '-q' , 'Illumina' , '--disease' , 'CANCER' , '-l' , 'INFO' , '-g' , docker_path ( radia_log ) ] docker_call ( tool = 'radia' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , tool_version = radia_options [ 'version' ] ) output_file = job . fileStore . writeGlobalFile ( radia_output ) job . fileStore . logToMaster ( 'Ran radia on %s:%s successfully' % ( univ_options [ 'patient' ] , chrom ) ) return output_file
| 2,911
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/radia.py#L129-L178
|
[
"def",
"_create_download_failed_message",
"(",
"exception",
",",
"url",
")",
":",
"message",
"=",
"'Failed to download from:\\n{}\\nwith {}:\\n{}'",
".",
"format",
"(",
"url",
",",
"exception",
".",
"__class__",
".",
"__name__",
",",
"exception",
")",
"if",
"_is_temporal_problem",
"(",
"exception",
")",
":",
"if",
"isinstance",
"(",
"exception",
",",
"requests",
".",
"ConnectionError",
")",
":",
"message",
"+=",
"'\\nPlease check your internet connection and try again.'",
"else",
":",
"message",
"+=",
"'\\nThere might be a problem in connection or the server failed to process '",
"'your request. Please try again.'",
"elif",
"isinstance",
"(",
"exception",
",",
"requests",
".",
"HTTPError",
")",
":",
"try",
":",
"server_message",
"=",
"''",
"for",
"elem",
"in",
"decode_data",
"(",
"exception",
".",
"response",
".",
"content",
",",
"MimeType",
".",
"XML",
")",
":",
"if",
"'ServiceException'",
"in",
"elem",
".",
"tag",
"or",
"'Message'",
"in",
"elem",
".",
"tag",
":",
"server_message",
"+=",
"elem",
".",
"text",
".",
"strip",
"(",
"'\\n\\t '",
")",
"except",
"ElementTree",
".",
"ParseError",
":",
"server_message",
"=",
"exception",
".",
"response",
".",
"text",
"message",
"+=",
"'\\nServer response: \"{}\"'",
".",
"format",
"(",
"server_message",
")",
"return",
"message"
] |
Run filterradia on the RADIA output .
|
def run_filter_radia ( job , bams , radia_file , univ_options , radia_options , chrom ) : work_dir = os . getcwd ( ) input_files = { 'rna.bam' : bams [ 'tumor_rna' ] , 'rna.bam.bai' : bams [ 'tumor_rnai' ] , 'tumor.bam' : bams [ 'tumor_dna' ] , 'tumor.bam.bai' : bams [ 'tumor_dnai' ] , 'normal.bam' : bams [ 'normal_dna' ] , 'normal.bam.bai' : bams [ 'normal_dnai' ] , 'radia.vcf' : radia_file , 'genome.fa.tar.gz' : radia_options [ 'genome_fasta' ] , 'genome.fa.fai.tar.gz' : radia_options [ 'genome_fai' ] , 'cosmic_beds' : radia_options [ 'cosmic_beds' ] , 'dbsnp_beds' : radia_options [ 'dbsnp_beds' ] , 'retrogene_beds' : radia_options [ 'retrogene_beds' ] , 'pseudogene_beds' : radia_options [ 'pseudogene_beds' ] , 'gencode_beds' : radia_options [ 'gencode_beds' ] } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = False ) for key in ( 'genome.fa' , 'genome.fa.fai' ) : input_files [ key ] = untargz ( input_files [ key + '.tar.gz' ] , work_dir ) for key in ( 'cosmic_beds' , 'dbsnp_beds' , 'retrogene_beds' , 'pseudogene_beds' , 'gencode_beds' ) : input_files [ key ] = untargz ( input_files [ key ] , work_dir ) input_files = { key : docker_path ( path ) for key , path in input_files . items ( ) } filterradia_log = '' . join ( [ work_dir , '/radia_filtered_' , chrom , '_radia.log' ] ) parameters = [ univ_options [ 'patient' ] , # shortID chrom . 
lstrip ( 'chr' ) , input_files [ 'radia.vcf' ] , '/data' , '/home/radia/scripts' , '-d' , input_files [ 'dbsnp_beds' ] , '-r' , input_files [ 'retrogene_beds' ] , '-p' , input_files [ 'pseudogene_beds' ] , '-c' , input_files [ 'cosmic_beds' ] , '-t' , input_files [ 'gencode_beds' ] , '--noSnpEff' , '--noBlacklist' , '--noTargets' , '--noRnaBlacklist' , '-f' , input_files [ 'genome.fa' ] , '--log=INFO' , '-g' , docker_path ( filterradia_log ) ] docker_call ( tool = 'filterradia' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , tool_version = radia_options [ 'version' ] ) output_file = '' . join ( [ work_dir , '/' , chrom , '.vcf' ] ) os . rename ( '' . join ( [ work_dir , '/' , univ_options [ 'patient' ] , '_' , chrom , '.vcf' ] ) , output_file ) output_fsid = job . fileStore . writeGlobalFile ( output_file ) export_results ( job , output_fsid , output_file , univ_options , subfolder = 'mutations/radia' ) job . fileStore . logToMaster ( 'Ran filter-radia on %s:%s successfully' % ( univ_options [ 'patient' ] , chrom ) ) return output_fsid
| 2,912
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/radia.py#L181-L246
|
[
"async",
"def",
"get_lease_async",
"(",
"self",
",",
"partition_id",
")",
":",
"try",
":",
"blob",
"=",
"await",
"self",
".",
"host",
".",
"loop",
".",
"run_in_executor",
"(",
"self",
".",
"executor",
",",
"functools",
".",
"partial",
"(",
"self",
".",
"storage_client",
".",
"get_blob_to_text",
",",
"self",
".",
"lease_container_name",
",",
"partition_id",
")",
")",
"lease",
"=",
"AzureBlobLease",
"(",
")",
"lease",
".",
"with_blob",
"(",
"blob",
")",
"async",
"def",
"state",
"(",
")",
":",
"\"\"\"\n Allow lease to curry storage_client to get state\n \"\"\"",
"try",
":",
"loop",
"=",
"asyncio",
".",
"get_event_loop",
"(",
")",
"res",
"=",
"await",
"loop",
".",
"run_in_executor",
"(",
"self",
".",
"executor",
",",
"functools",
".",
"partial",
"(",
"self",
".",
"storage_client",
".",
"get_blob_properties",
",",
"self",
".",
"lease_container_name",
",",
"partition_id",
")",
")",
"return",
"res",
".",
"properties",
".",
"lease",
".",
"state",
"except",
"Exception",
"as",
"err",
":",
"# pylint: disable=broad-except",
"_logger",
".",
"error",
"(",
"\"Failed to get lease state %r %r\"",
",",
"err",
",",
"partition_id",
")",
"lease",
".",
"state",
"=",
"state",
"return",
"lease",
"except",
"Exception",
"as",
"err",
":",
"# pylint: disable=broad-except",
"_logger",
".",
"error",
"(",
"\"Failed to get lease %r %r\"",
",",
"err",
",",
"partition_id",
")"
] |
Index bamfile using samtools
|
def index_bamfile ( job , bamfile , sample_type , univ_options , samtools_options , sample_info = None , export = True ) : work_dir = os . getcwd ( ) in_bamfile = sample_type if sample_info is not None : assert isinstance ( sample_info , str ) in_bamfile = '_' . join ( [ in_bamfile , sample_info ] ) in_bamfile += '.bam' input_files = { in_bamfile : bamfile } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = True ) parameters = [ 'index' , input_files [ in_bamfile ] ] docker_call ( tool = 'samtools' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , tool_version = samtools_options [ 'version' ] ) out_bai = '/' . join ( [ work_dir , in_bamfile + '.bai' ] ) output_files = { in_bamfile : bamfile , in_bamfile + '.bai' : job . fileStore . writeGlobalFile ( out_bai ) } if export : export_results ( job , bamfile , os . path . splitext ( out_bai ) [ 0 ] , univ_options , subfolder = 'alignments' ) export_results ( job , output_files [ in_bamfile + '.bai' ] , out_bai , univ_options , subfolder = 'alignments' ) job . fileStore . logToMaster ( 'Ran samtools-index on %s:%s successfully' % ( univ_options [ 'patient' ] , sample_type ) ) return output_files
| 2,913
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/alignment/common.py#L33-L74
|
[
"def",
"get",
"(",
"self",
",",
"url",
",",
"store_on_error",
"=",
"False",
",",
"xpath",
"=",
"None",
",",
"rate_limit",
"=",
"None",
",",
"log_hits",
"=",
"True",
",",
"log_misses",
"=",
"True",
")",
":",
"try",
":",
"# get cached request - if none is found, this throws a NoResultFound exception",
"cached",
"=",
"self",
".",
"_query",
"(",
"url",
",",
"xpath",
")",
".",
"one",
"(",
")",
"if",
"log_hits",
":",
"config",
".",
"logger",
".",
"info",
"(",
"\"Request cache hit: \"",
"+",
"url",
")",
"# if the cached value is from a request that resulted in an error, throw an exception",
"if",
"cached",
".",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"raise",
"RuntimeError",
"(",
"\"Cached request returned an error, code \"",
"+",
"str",
"(",
"cached",
".",
"status_code",
")",
")",
"except",
"NoResultFound",
":",
"if",
"log_misses",
":",
"config",
".",
"logger",
".",
"info",
"(",
"\"Request cache miss: \"",
"+",
"url",
")",
"# perform the request",
"try",
":",
"# rate limit",
"if",
"rate_limit",
"is",
"not",
"None",
"and",
"self",
".",
"last_query",
"is",
"not",
"None",
":",
"to_sleep",
"=",
"rate_limit",
"-",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"self",
".",
"last_query",
")",
".",
"total_seconds",
"(",
")",
"if",
"to_sleep",
">",
"0",
":",
"time",
".",
"sleep",
"(",
"to_sleep",
")",
"self",
".",
"last_query",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"status_code",
"=",
"response",
".",
"status_code",
"# get 'text', not 'content', because then we are sure to get unicode",
"content",
"=",
"response",
".",
"text",
"response",
".",
"close",
"(",
")",
"if",
"xpath",
"is",
"not",
"None",
":",
"doc",
"=",
"html",
".",
"fromstring",
"(",
"content",
")",
"nodes",
"=",
"doc",
".",
"xpath",
"(",
"xpath",
")",
"if",
"len",
"(",
"nodes",
")",
"==",
"0",
":",
"# xpath not found; set content and status code, exception is raised below",
"content",
"=",
"\"xpath not found: \"",
"+",
"xpath",
"status_code",
"=",
"ERROR_XPATH_NOT_FOUND",
"else",
":",
"# extract desired node only",
"content",
"=",
"html",
".",
"tostring",
"(",
"nodes",
"[",
"0",
"]",
",",
"encoding",
"=",
"'unicode'",
")",
"except",
"requests",
".",
"ConnectionError",
"as",
"e",
":",
"# on a connection error, write exception information to a response object",
"status_code",
"=",
"ERROR_CONNECTION_ERROR",
"content",
"=",
"str",
"(",
"e",
")",
"# a new request cache object",
"cached",
"=",
"CachedRequest",
"(",
"url",
"=",
"str",
"(",
"url",
")",
",",
"content",
"=",
"content",
",",
"status_code",
"=",
"status_code",
",",
"xpath",
"=",
"xpath",
",",
"queried_on",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
")",
"# if desired, store the response even if an error occurred",
"if",
"status_code",
"==",
"requests",
".",
"codes",
".",
"ok",
"or",
"store_on_error",
":",
"self",
".",
"session",
".",
"add",
"(",
"cached",
")",
"self",
".",
"session",
".",
"commit",
"(",
")",
"if",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"raise",
"RuntimeError",
"(",
"\"Error processing the request, \"",
"+",
"str",
"(",
"status_code",
")",
"+",
"\": \"",
"+",
"content",
")",
"return",
"cached",
".",
"content"
] |
Sort bamfile using samtools
|
def sort_bamfile ( job , bamfile , sample_type , univ_options , samtools_options ) : work_dir = os . getcwd ( ) in_bamfile = '' . join ( [ sample_type , '.bam' ] ) out_bamfile = '_' . join ( [ sample_type , 'sorted.bam' ] ) input_files = { in_bamfile : bamfile } input_files = get_files_from_filestore ( job , input_files , work_dir , docker = True ) parameters = [ 'sort' , '-o' , docker_path ( out_bamfile ) , '-O' , 'bam' , '-T' , 'temp_sorted' , '-@' , str ( samtools_options [ 'n' ] ) , input_files [ in_bamfile ] ] docker_call ( tool = 'samtools' , tool_parameters = parameters , work_dir = work_dir , dockerhub = univ_options [ 'dockerhub' ] , tool_version = samtools_options [ 'version' ] ) job . fileStore . deleteGlobalFile ( bamfile ) job . fileStore . logToMaster ( 'Ran samtools-sort on %s:%s successfully' % ( univ_options [ 'patient' ] , sample_type ) ) return job . fileStore . writeGlobalFile ( out_bamfile )
| 2,914
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/alignment/common.py#L77-L105
|
[
"def",
"get",
"(",
"self",
",",
"url",
",",
"store_on_error",
"=",
"False",
",",
"xpath",
"=",
"None",
",",
"rate_limit",
"=",
"None",
",",
"log_hits",
"=",
"True",
",",
"log_misses",
"=",
"True",
")",
":",
"try",
":",
"# get cached request - if none is found, this throws a NoResultFound exception",
"cached",
"=",
"self",
".",
"_query",
"(",
"url",
",",
"xpath",
")",
".",
"one",
"(",
")",
"if",
"log_hits",
":",
"config",
".",
"logger",
".",
"info",
"(",
"\"Request cache hit: \"",
"+",
"url",
")",
"# if the cached value is from a request that resulted in an error, throw an exception",
"if",
"cached",
".",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"raise",
"RuntimeError",
"(",
"\"Cached request returned an error, code \"",
"+",
"str",
"(",
"cached",
".",
"status_code",
")",
")",
"except",
"NoResultFound",
":",
"if",
"log_misses",
":",
"config",
".",
"logger",
".",
"info",
"(",
"\"Request cache miss: \"",
"+",
"url",
")",
"# perform the request",
"try",
":",
"# rate limit",
"if",
"rate_limit",
"is",
"not",
"None",
"and",
"self",
".",
"last_query",
"is",
"not",
"None",
":",
"to_sleep",
"=",
"rate_limit",
"-",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"self",
".",
"last_query",
")",
".",
"total_seconds",
"(",
")",
"if",
"to_sleep",
">",
"0",
":",
"time",
".",
"sleep",
"(",
"to_sleep",
")",
"self",
".",
"last_query",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"status_code",
"=",
"response",
".",
"status_code",
"# get 'text', not 'content', because then we are sure to get unicode",
"content",
"=",
"response",
".",
"text",
"response",
".",
"close",
"(",
")",
"if",
"xpath",
"is",
"not",
"None",
":",
"doc",
"=",
"html",
".",
"fromstring",
"(",
"content",
")",
"nodes",
"=",
"doc",
".",
"xpath",
"(",
"xpath",
")",
"if",
"len",
"(",
"nodes",
")",
"==",
"0",
":",
"# xpath not found; set content and status code, exception is raised below",
"content",
"=",
"\"xpath not found: \"",
"+",
"xpath",
"status_code",
"=",
"ERROR_XPATH_NOT_FOUND",
"else",
":",
"# extract desired node only",
"content",
"=",
"html",
".",
"tostring",
"(",
"nodes",
"[",
"0",
"]",
",",
"encoding",
"=",
"'unicode'",
")",
"except",
"requests",
".",
"ConnectionError",
"as",
"e",
":",
"# on a connection error, write exception information to a response object",
"status_code",
"=",
"ERROR_CONNECTION_ERROR",
"content",
"=",
"str",
"(",
"e",
")",
"# a new request cache object",
"cached",
"=",
"CachedRequest",
"(",
"url",
"=",
"str",
"(",
"url",
")",
",",
"content",
"=",
"content",
",",
"status_code",
"=",
"status_code",
",",
"xpath",
"=",
"xpath",
",",
"queried_on",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
")",
"# if desired, store the response even if an error occurred",
"if",
"status_code",
"==",
"requests",
".",
"codes",
".",
"ok",
"or",
"store_on_error",
":",
"self",
".",
"session",
".",
"add",
"(",
"cached",
")",
"self",
".",
"session",
".",
"commit",
"(",
")",
"if",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"raise",
"RuntimeError",
"(",
"\"Error processing the request, \"",
"+",
"str",
"(",
"status_code",
")",
"+",
"\": \"",
"+",
"content",
")",
"return",
"cached",
".",
"content"
] |
Create an identity for a given user instance .
|
def get_identity ( user ) : identity = Identity ( user . id ) if hasattr ( user , 'id' ) : identity . provides . add ( UserNeed ( user . id ) ) for role in getattr ( user , 'roles' , [ ] ) : identity . provides . add ( RoleNeed ( role . name ) ) identity . user = user return identity
| 2,915
|
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/utils.py#L16-L30
|
[
"def",
"_download_video",
"(",
"self",
",",
"video_url",
",",
"video_name",
")",
":",
"filename",
"=",
"'{0:0=2d}_{1}'",
".",
"format",
"(",
"DriverWrappersPool",
".",
"videos_number",
",",
"video_name",
")",
"filename",
"=",
"'{}.mp4'",
".",
"format",
"(",
"get_valid_filename",
"(",
"filename",
")",
")",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"DriverWrappersPool",
".",
"videos_directory",
",",
"filename",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"DriverWrappersPool",
".",
"videos_directory",
")",
":",
"os",
".",
"makedirs",
"(",
"DriverWrappersPool",
".",
"videos_directory",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"video_url",
")",
"open",
"(",
"filepath",
",",
"'wb'",
")",
".",
"write",
"(",
"response",
".",
"content",
")",
"self",
".",
"logger",
".",
"info",
"(",
"\"Video saved in '%s'\"",
",",
"filepath",
")",
"DriverWrappersPool",
".",
"videos_number",
"+=",
"1"
] |
Converts a object to a items list respecting also slots .
|
def object_to_items ( data_structure ) : items = [ ] # Get all items from dict try : items = list ( data_structure . __dict__ . items ( ) ) except : pass # Get all slots hierarchy = [ data_structure ] try : hierarchy += inspect . getmro ( data_structure ) except : pass slots = [ ] try : for b in hierarchy : try : slots += b . __slots__ except : # pragma: no cover pass except : # pragma: no cover pass # Get attrs from slots for x in slots : items . append ( ( x , getattr ( data_structure , x ) ) ) return items
| 2,916
|
https://github.com/adfinis-sygroup/freeze/blob/61b4fab8a90ed76d685448723baaa57e2bbd5ef9/freeze/xfreeze.py#L251-L279
|
[
"def",
"single_read",
"(",
"self",
",",
"register",
")",
":",
"comm_reg",
"=",
"(",
"0b00010",
"<<",
"3",
")",
"+",
"register",
"if",
"register",
"==",
"self",
".",
"AD7730_STATUS_REG",
":",
"bytes_num",
"=",
"1",
"elif",
"register",
"==",
"self",
".",
"AD7730_DATA_REG",
":",
"bytes_num",
"=",
"3",
"elif",
"register",
"==",
"self",
".",
"AD7730_MODE_REG",
":",
"bytes_num",
"=",
"2",
"elif",
"register",
"==",
"self",
".",
"AD7730_FILTER_REG",
":",
"bytes_num",
"=",
"3",
"elif",
"register",
"==",
"self",
".",
"AD7730_DAC_REG",
":",
"bytes_num",
"=",
"1",
"elif",
"register",
"==",
"self",
".",
"AD7730_OFFSET_REG",
":",
"bytes_num",
"=",
"3",
"elif",
"register",
"==",
"self",
".",
"AD7730_GAIN_REG",
":",
"bytes_num",
"=",
"3",
"elif",
"register",
"==",
"self",
".",
"AD7730_TEST_REG",
":",
"bytes_num",
"=",
"3",
"command",
"=",
"[",
"comm_reg",
"]",
"+",
"(",
"[",
"0x00",
"]",
"*",
"bytes_num",
")",
"spi",
".",
"SPI_write",
"(",
"self",
".",
"CS",
",",
"command",
")",
"data",
"=",
"spi",
".",
"SPI_read",
"(",
"bytes_num",
"+",
"1",
")",
"return",
"data",
"[",
"1",
":",
"]"
] |
Sort a recursive data_structure .
|
def recursive_sort ( data_structure ) : # We don't sory primitve types if not isinstance ( data_structure , _primitive_types ) : is_meta = isinstance ( data_structure , Meta ) was_dict = isinstance ( data_structure , WasDict ) if not ( is_meta or was_dict ) : was_dict = isinstance ( data_structure , dict ) if not was_dict : # Dictize if possible (support objects) try : data_structure = data_structure . __dict__ was_dict = True except : pass # Itemize if possible try : data_structure = data_structure . items ( ) except : pass tlen = - 1 # If item has a length we sort it try : tlen = len ( data_structure ) except : # pragma: no cover pass if tlen != - 1 : # Well there are classes out in the wild that answer to len # but have no indexer. try : if was_dict : return tuple ( sorted ( [ ( recursive_sort ( x [ 0 ] ) , recursive_sort ( x [ 1 ] ) , ) for x in data_structure ] , key = TraversalBasedReprCompare ) ) elif is_meta : return data_structure [ 0 : - 1 ] + [ recursive_sort ( data_structure [ - 1 ] ) ] else : return tuple ( sorted ( [ recursive_sort ( x , ) for x in data_structure ] , key = TraversalBasedReprCompare , ) ) except : # pragma: no cover pass return data_structure
| 2,917
|
https://github.com/adfinis-sygroup/freeze/blob/61b4fab8a90ed76d685448723baaa57e2bbd5ef9/freeze/xfreeze.py#L531-L611
|
[
"def",
"deauthorize_application",
"(",
"request",
")",
":",
"if",
"request",
".",
"facebook",
":",
"user",
"=",
"User",
".",
"objects",
".",
"get",
"(",
"facebook_id",
"=",
"request",
".",
"facebook",
".",
"signed_request",
".",
"user",
".",
"id",
")",
"user",
".",
"authorized",
"=",
"False",
"user",
".",
"save",
"(",
")",
"return",
"HttpResponse",
"(",
")",
"else",
":",
"return",
"HttpResponse",
"(",
"status",
"=",
"400",
")"
] |
Yields the leaves of the frozen data - structure pre - order .
|
def traverse_frozen_data ( data_structure ) : parent_stack = [ data_structure ] while parent_stack : node = parent_stack . pop ( 0 ) # We don't iterate strings tlen = - 1 if not isinstance ( node , _string_types ) : # If item has a length we freeze it try : tlen = len ( node ) except : pass if tlen == - 1 : yield node else : parent_stack = list ( node ) + parent_stack
| 2,918
|
https://github.com/adfinis-sygroup/freeze/blob/61b4fab8a90ed76d685448723baaa57e2bbd5ef9/freeze/xfreeze.py#L665-L683
|
[
"def",
"delete_subscriptions",
"(",
"self",
")",
":",
"warnings",
".",
"warn",
"(",
"'This could be slow for large numbers of connected devices.'",
"'If possible, explicitly delete subscriptions known to have been created.'",
")",
"for",
"device",
"in",
"self",
".",
"list_connected_devices",
"(",
")",
":",
"try",
":",
"self",
".",
"delete_device_subscriptions",
"(",
"device_id",
"=",
"device",
".",
"id",
")",
"except",
"CloudApiException",
"as",
"e",
":",
"LOG",
".",
"warning",
"(",
"'failed to remove subscription for %s: %s'",
",",
"device",
".",
"id",
",",
"e",
")",
"continue"
] |
Dump any data - structure or object traverse it depth - first in - order and apply a unified diff .
|
def tree_diff ( a , b , n = 5 , sort = False ) : a = dump ( a ) b = dump ( b ) if not sort : a = vformat ( a ) . split ( "\n" ) b = vformat ( b ) . split ( "\n" ) else : a = vformat ( recursive_sort ( a ) ) . split ( "\n" ) b = vformat ( recursive_sort ( b ) ) . split ( "\n" ) return "\n" . join ( difflib . unified_diff ( a , b , n = n , lineterm = "" ) )
| 2,919
|
https://github.com/adfinis-sygroup/freeze/blob/61b4fab8a90ed76d685448723baaa57e2bbd5ef9/freeze/xfreeze.py#L752-L823
|
[
"def",
"disable_caching",
"(",
"self",
")",
":",
"self",
".",
"caching_enabled",
"=",
"False",
"for",
"c",
"in",
"self",
".",
"values",
"(",
")",
":",
"c",
".",
"disable_cacher",
"(",
")"
] |
Basic group statistics .
|
def stats ( self ) : stats_online = CRef . cint ( ) stats_ingame = CRef . cint ( ) stats_chatting = CRef . cint ( ) self . _iface . get_clan_stats ( self . group_id , stats_online , stats_ingame , stats_chatting , ) return { 'online' : int ( stats_online ) , 'ingame' : int ( stats_ingame ) , 'chatting' : int ( stats_chatting ) , }
| 2,920
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/libsteam/resources/groups.py#L24-L50
|
[
"def",
"seed_args",
"(",
"subparsers",
")",
":",
"seed_parser",
"=",
"subparsers",
".",
"add_parser",
"(",
"'seed'",
")",
"secretfile_args",
"(",
"seed_parser",
")",
"vars_args",
"(",
"seed_parser",
")",
"seed_parser",
".",
"add_argument",
"(",
"'--mount-only'",
",",
"dest",
"=",
"'mount_only'",
",",
"help",
"=",
"'Only mount paths if needed'",
",",
"default",
"=",
"False",
",",
"action",
"=",
"'store_true'",
")",
"thaw_from_args",
"(",
"seed_parser",
")",
"seed_parser",
".",
"add_argument",
"(",
"'--remove-unknown'",
",",
"dest",
"=",
"'remove_unknown'",
",",
"action",
"=",
"'store_true'",
",",
"help",
"=",
"'Remove mountpoints that are not '",
"'defined in the Secretfile'",
")",
"base_args",
"(",
"seed_parser",
")"
] |
Creates a Trading - Bots project directory structure for the given project NAME in the current directory or optionally in the given DIRECTORY .
|
def startproject ( name , directory , verbosity ) : handle_template ( 'project' , name , target = directory , verbosity = verbosity ) click . echo ( f"Success: '{name}' project was successfully created on '{directory}'" )
| 2,921
|
https://github.com/budacom/trading-bots/blob/8cb68bb8d0b5f822108db1cc5dae336e3d3c3452/trading_bots/core/management/__init__.py#L101-L107
|
[
"def",
"exit_resync",
"(",
"self",
")",
":",
"print",
"(",
"\"********** exit & resync **********\"",
")",
"try",
":",
"if",
"self",
".",
"client_socket",
":",
"self",
".",
"client_socket",
".",
"close",
"(",
")",
"self",
".",
"client_socket",
"=",
"None",
"try",
":",
"self",
".",
"exit",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"_log_error",
"(",
"e",
")",
"print",
"(",
"\"Pause for exit(s) ...\"",
")",
"time",
".",
"sleep",
"(",
"60",
")",
"except",
"(",
"socket",
".",
"error",
",",
"ConnectionError",
")",
":",
"pass",
"self",
".",
"resync",
"(",
")"
] |
Creates a Bot s directory structure for the given bot NAME in the current directory or optionally in the given DIRECTORY .
|
def createbot ( name , directory , verbosity ) : handle_template ( 'bot' , name , target = directory , verbosity = verbosity ) click . echo ( f"Success: '{name}' bot was successfully created on '{directory}'" )
| 2,922
|
https://github.com/budacom/trading-bots/blob/8cb68bb8d0b5f822108db1cc5dae336e3d3c3452/trading_bots/core/management/__init__.py#L114-L120
|
[
"def",
"get_correspondance_dict",
"(",
"self",
",",
"classA",
",",
"classB",
",",
"restrict",
"=",
"None",
",",
"replace_numeric",
"=",
"True",
")",
":",
"result",
"=",
"{",
"nn",
":",
"None",
"for",
"nn",
"in",
"self",
".",
"data",
"[",
"classA",
"]",
".",
"values",
"}",
"if",
"restrict",
"is",
"None",
":",
"df",
"=",
"self",
".",
"data",
".",
"copy",
"(",
")",
"else",
":",
"df",
"=",
"self",
".",
"data",
"[",
"restrict",
"]",
".",
"copy",
"(",
")",
"if",
"replace_numeric",
"and",
"df",
"[",
"classB",
"]",
".",
"dtype",
".",
"kind",
"in",
"'bifc'",
":",
"df",
".",
"loc",
"[",
"~",
"df",
"[",
"classB",
"]",
".",
"isnull",
"(",
")",
",",
"classB",
"]",
"=",
"classB",
"df",
".",
"loc",
"[",
"df",
"[",
"classB",
"]",
".",
"isnull",
"(",
")",
",",
"classB",
"]",
"=",
"None",
"result",
".",
"update",
"(",
"df",
".",
"groupby",
"(",
"classA",
")",
".",
"aggregate",
"(",
"lambda",
"x",
":",
"list",
"(",
"x",
".",
"unique",
"(",
")",
")",
")",
".",
"to_dict",
"(",
")",
"[",
"classB",
"]",
")",
"return",
"result"
] |
Returns user state . See UserState .
|
def get_state ( self , as_str = False ) : uid = self . user_id if self . _iface_user . get_id ( ) == uid : result = self . _iface . get_my_state ( ) else : result = self . _iface . get_state ( uid ) if as_str : return UserState . get_alias ( result ) return result
| 2,923
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/libsteam/resources/user.py#L95-L113
|
[
"def",
"removeAllChildrenAtIndex",
"(",
"self",
",",
"parentIndex",
")",
":",
"if",
"not",
"parentIndex",
".",
"isValid",
"(",
")",
":",
"logger",
".",
"debug",
"(",
"\"No valid item selected for deletion (ignored).\"",
")",
"return",
"parentItem",
"=",
"self",
".",
"getItem",
"(",
"parentIndex",
",",
"None",
")",
"logger",
".",
"debug",
"(",
"\"Removing children of {!r}\"",
".",
"format",
"(",
"parentItem",
")",
")",
"assert",
"parentItem",
",",
"\"parentItem not found\"",
"#firstChildRow = self.index(0, 0, parentIndex).row()",
"#lastChildRow = self.index(parentItem.nChildren()-1, 0, parentIndex).row()",
"#logger.debug(\"Removing rows: {} to {}\".format(firstChildRow, lastChildRow))",
"#self.beginRemoveRows(parentIndex, firstChildRow, lastChildRow)",
"self",
".",
"beginRemoveRows",
"(",
"parentIndex",
",",
"0",
",",
"parentItem",
".",
"nChildren",
"(",
")",
"-",
"1",
")",
"try",
":",
"parentItem",
".",
"removeAllChildren",
"(",
")",
"finally",
":",
"self",
".",
"endRemoveRows",
"(",
")",
"logger",
".",
"debug",
"(",
"\"removeAllChildrenAtIndex completed\"",
")"
] |
Add system roles Needs to users identities .
|
def load_permissions_on_identity_loaded ( sender , identity ) : identity . provides . add ( any_user ) # if the user is not anonymous if current_user . is_authenticated : # Add the need provided to authenticated users identity . provides . add ( authenticated_user )
| 2,924
|
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/invenio_access/loaders.py#L15-L29
|
[
"def",
"stop_experiment",
"(",
"args",
")",
":",
"experiment_id_list",
"=",
"parse_ids",
"(",
"args",
")",
"if",
"experiment_id_list",
":",
"experiment_config",
"=",
"Experiments",
"(",
")",
"experiment_dict",
"=",
"experiment_config",
".",
"get_all_experiments",
"(",
")",
"for",
"experiment_id",
"in",
"experiment_id_list",
":",
"print_normal",
"(",
"'Stoping experiment %s'",
"%",
"experiment_id",
")",
"nni_config",
"=",
"Config",
"(",
"experiment_dict",
"[",
"experiment_id",
"]",
"[",
"'fileName'",
"]",
")",
"rest_port",
"=",
"nni_config",
".",
"get_config",
"(",
"'restServerPort'",
")",
"rest_pid",
"=",
"nni_config",
".",
"get_config",
"(",
"'restServerPid'",
")",
"if",
"rest_pid",
":",
"kill_command",
"(",
"rest_pid",
")",
"tensorboard_pid_list",
"=",
"nni_config",
".",
"get_config",
"(",
"'tensorboardPidList'",
")",
"if",
"tensorboard_pid_list",
":",
"for",
"tensorboard_pid",
"in",
"tensorboard_pid_list",
":",
"try",
":",
"kill_command",
"(",
"tensorboard_pid",
")",
"except",
"Exception",
"as",
"exception",
":",
"print_error",
"(",
"exception",
")",
"nni_config",
".",
"set_config",
"(",
"'tensorboardPidList'",
",",
"[",
"]",
")",
"print_normal",
"(",
"'Stop experiment success!'",
")",
"experiment_config",
".",
"update_experiment",
"(",
"experiment_id",
",",
"'status'",
",",
"'STOPPED'",
")",
"time_now",
"=",
"time",
".",
"strftime",
"(",
"'%Y-%m-%d %H:%M:%S'",
",",
"time",
".",
"localtime",
"(",
"time",
".",
"time",
"(",
")",
")",
")",
"experiment_config",
".",
"update_experiment",
"(",
"experiment_id",
",",
"'endTime'",
",",
"str",
"(",
"time_now",
")",
")"
] |
Prints the errors observed for a file
|
def print_errors ( self , file_name ) : for error in self . get_messages ( file_name ) : print ( '\t' , error . __unicode__ ( ) )
| 2,925
|
https://github.com/HEPData/hepdata-validator/blob/d0b0cab742a009c8f0e8aac9f8c8e434a524d43c/hepdata_validator/__init__.py#L98-L103
|
[
"def",
"translateprotocolmask",
"(",
"protocol",
")",
":",
"pcscprotocol",
"=",
"0",
"if",
"None",
"!=",
"protocol",
":",
"if",
"CardConnection",
".",
"T0_protocol",
"&",
"protocol",
":",
"pcscprotocol",
"|=",
"SCARD_PROTOCOL_T0",
"if",
"CardConnection",
".",
"T1_protocol",
"&",
"protocol",
":",
"pcscprotocol",
"|=",
"SCARD_PROTOCOL_T1",
"if",
"CardConnection",
".",
"RAW_protocol",
"&",
"protocol",
":",
"pcscprotocol",
"|=",
"SCARD_PROTOCOL_RAW",
"if",
"CardConnection",
".",
"T15_protocol",
"&",
"protocol",
":",
"pcscprotocol",
"|=",
"SCARD_PROTOCOL_T15",
"return",
"pcscprotocol"
] |
Return cleaned fields as a dict determine which geom takes precedence .
|
def clean ( self ) : data = super ( RasterQueryForm , self ) . clean ( ) geom = data . pop ( 'upload' , None ) or data . pop ( 'bbox' , None ) if geom : data [ 'g' ] = geom return data
| 2,926
|
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/forms/forms.py#L125-L133
|
[
"def",
"libvlc_vlm_show_media",
"(",
"p_instance",
",",
"psz_name",
")",
":",
"f",
"=",
"_Cfunctions",
".",
"get",
"(",
"'libvlc_vlm_show_media'",
",",
"None",
")",
"or",
"_Cfunction",
"(",
"'libvlc_vlm_show_media'",
",",
"(",
"(",
"1",
",",
")",
",",
"(",
"1",
",",
")",
",",
")",
",",
"string_result",
",",
"ctypes",
".",
"c_void_p",
",",
"Instance",
",",
"ctypes",
".",
"c_char_p",
")",
"return",
"f",
"(",
"p_instance",
",",
"psz_name",
")"
] |
Register a matcher associated to one or more aliases . Each alias given is also normalized .
|
def register ( matcher , * aliases ) : docstr = matcher . __doc__ if matcher . __doc__ is not None else '' helpmatchers [ matcher ] = docstr . strip ( ) for alias in aliases : matchers [ alias ] = matcher # Map a normalized version of the alias norm = normalize ( alias ) normalized [ norm ] = alias # Map a version without snake case norm = norm . replace ( '_' , '' ) normalized [ norm ] = alias
| 2,927
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/matchers.py#L54-L68
|
[
"def",
"is_running",
"(",
"self",
")",
":",
"if",
"self",
".",
"block",
":",
"return",
"False",
"return",
"self",
".",
"thread",
".",
"is_alive",
"(",
")",
"or",
"self",
".",
"process",
".",
"poll",
"(",
")",
"is",
"None"
] |
Normalizes an alias by removing adverbs defined in IGNORED_WORDS
|
def normalize ( alias ) : # Convert from CamelCase to snake_case alias = re . sub ( r'([a-z])([A-Z])' , r'\1_\2' , alias ) # Ignore words words = alias . lower ( ) . split ( '_' ) words = filter ( lambda w : w not in IGNORED_WORDS , words ) return '_' . join ( words )
| 2,928
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/matchers.py#L94-L102
|
[
"def",
"sqlite_by_df",
"(",
"self",
",",
"destination",
",",
"progress",
")",
":",
"db",
"=",
"SQLiteDatabase",
"(",
"destination",
")",
"db",
".",
"create",
"(",
")",
"for",
"table",
"in",
"progress",
"(",
"self",
".",
"real_tables",
")",
":",
"self",
"[",
"table",
"]",
".",
"to_sql",
"(",
"table",
",",
"con",
"=",
"db",
".",
"connection",
")",
"db",
".",
"close",
"(",
")"
] |
Tries to find a matcher callable associated to the given alias . If an exact match does not exists it will try normalizing it and even removing underscores to find one .
|
def lookup ( alias ) : if alias in matchers : return matchers [ alias ] else : norm = normalize ( alias ) if norm in normalized : alias = normalized [ norm ] return matchers [ alias ] # Check without snake case if - 1 != alias . find ( '_' ) : norm = normalize ( alias ) . replace ( '_' , '' ) return lookup ( norm ) return None
| 2,929
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/matchers.py#L105-L124
|
[
"def",
"set_nonblock",
"(",
"fd",
")",
":",
"# type: (int) -> None",
"fcntl",
".",
"fcntl",
"(",
"fd",
",",
"fcntl",
".",
"F_SETFL",
",",
"fcntl",
".",
"fcntl",
"(",
"fd",
",",
"fcntl",
".",
"F_GETFL",
")",
"|",
"os",
".",
"O_NONBLOCK",
")"
] |
Suggest a list of aliases which are similar enough
|
def suggest ( alias , max = 3 , cutoff = 0.5 ) : aliases = matchers . keys ( ) similar = get_close_matches ( alias , aliases , n = max , cutoff = cutoff ) return similar
| 2,930
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/matchers.py#L127-L134
|
[
"def",
"_read_footer",
"(",
"file_obj",
")",
":",
"footer_size",
"=",
"_get_footer_size",
"(",
"file_obj",
")",
"if",
"logger",
".",
"isEnabledFor",
"(",
"logging",
".",
"DEBUG",
")",
":",
"logger",
".",
"debug",
"(",
"\"Footer size in bytes: %s\"",
",",
"footer_size",
")",
"file_obj",
".",
"seek",
"(",
"-",
"(",
"8",
"+",
"footer_size",
")",
",",
"2",
")",
"# seek to beginning of footer",
"tin",
"=",
"TFileTransport",
"(",
"file_obj",
")",
"pin",
"=",
"TCompactProtocolFactory",
"(",
")",
".",
"get_protocol",
"(",
"tin",
")",
"fmd",
"=",
"parquet_thrift",
".",
"FileMetaData",
"(",
")",
"fmd",
".",
"read",
"(",
"pin",
")",
"return",
"fmd"
] |
Get a list of chromosomes in the input data .
|
def sample_chromosomes ( job , genome_fai_file ) : work_dir = os . getcwd ( ) genome_fai = untargz ( job . fileStore . readGlobalFile ( genome_fai_file ) , work_dir ) return chromosomes_from_fai ( genome_fai )
| 2,931
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/common.py#L25-L35
|
[
"def",
"min_renewable_share",
"(",
"network",
",",
"snapshots",
",",
"share",
"=",
"0.72",
")",
":",
"renewables",
"=",
"[",
"'wind_onshore'",
",",
"'wind_offshore'",
",",
"'biomass'",
",",
"'solar'",
",",
"'run_of_river'",
"]",
"res",
"=",
"list",
"(",
"network",
".",
"generators",
".",
"index",
"[",
"network",
".",
"generators",
".",
"carrier",
".",
"isin",
"(",
"renewables",
")",
"]",
")",
"total",
"=",
"list",
"(",
"network",
".",
"generators",
".",
"index",
")",
"snapshots",
"=",
"network",
".",
"snapshots",
"def",
"_rule",
"(",
"m",
")",
":",
"\"\"\"\n \"\"\"",
"renewable_production",
"=",
"sum",
"(",
"m",
".",
"generator_p",
"[",
"gen",
",",
"sn",
"]",
"for",
"gen",
"in",
"res",
"for",
"sn",
"in",
"snapshots",
")",
"total_production",
"=",
"sum",
"(",
"m",
".",
"generator_p",
"[",
"gen",
",",
"sn",
"]",
"for",
"gen",
"in",
"total",
"for",
"sn",
"in",
"snapshots",
")",
"return",
"(",
"renewable_production",
">=",
"total_production",
"*",
"share",
")",
"network",
".",
"model",
".",
"min_renewable_share",
"=",
"Constraint",
"(",
"rule",
"=",
"_rule",
")"
] |
Aggregate all the called mutations .
|
def run_mutation_aggregator ( job , mutation_results , univ_options ) : # Setup an input data structure for the merge function out = { } for chrom in mutation_results [ 'mutect' ] . keys ( ) : out [ chrom ] = job . addChildJobFn ( merge_perchrom_mutations , chrom , mutation_results , univ_options ) . rv ( ) merged_snvs = job . addFollowOnJobFn ( merge_perchrom_vcfs , out , 'merged' , univ_options ) job . fileStore . logToMaster ( 'Aggregated mutations for %s successfully' % univ_options [ 'patient' ] ) return merged_snvs . rv ( )
| 2,932
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/common.py#L54-L71
|
[
"def",
"relative_humidity_wet_psychrometric",
"(",
"dry_bulb_temperature",
",",
"web_bulb_temperature",
",",
"pressure",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"(",
"psychrometric_vapor_pressure_wet",
"(",
"dry_bulb_temperature",
",",
"web_bulb_temperature",
",",
"pressure",
",",
"*",
"*",
"kwargs",
")",
"/",
"saturation_vapor_pressure",
"(",
"dry_bulb_temperature",
")",
")"
] |
Merge the mutation calls for a single chromosome .
|
def merge_perchrom_mutations ( job , chrom , mutations , univ_options ) : work_dir = os . getcwd ( ) from protect . mutation_calling . muse import process_muse_vcf from protect . mutation_calling . mutect import process_mutect_vcf from protect . mutation_calling . radia import process_radia_vcf from protect . mutation_calling . somaticsniper import process_somaticsniper_vcf from protect . mutation_calling . strelka import process_strelka_vcf mutations . pop ( 'indels' ) mutations [ 'strelka_indels' ] = mutations [ 'strelka' ] [ 'indels' ] mutations [ 'strelka_snvs' ] = mutations [ 'strelka' ] [ 'snvs' ] vcf_processor = { 'snvs' : { 'mutect' : process_mutect_vcf , 'muse' : process_muse_vcf , 'radia' : process_radia_vcf , 'somaticsniper' : process_somaticsniper_vcf , 'strelka_snvs' : process_strelka_vcf } , 'indels' : { 'strelka_indels' : process_strelka_vcf } } # 'fusions': lambda x: None, # 'indels': lambda x: None} # For now, let's just say 2 out of n need to call it. # num_preds = len(mutations) # majority = int((num_preds + 0.5) / 2) majority = { 'snvs' : 2 , 'indels' : 1 } accepted_hits = defaultdict ( dict ) for mut_type in vcf_processor . keys ( ) : # Get input files perchrom_mutations = { caller : vcf_processor [ mut_type ] [ caller ] ( job , mutations [ caller ] [ chrom ] , work_dir , univ_options ) for caller in vcf_processor [ mut_type ] } # Process the strelka key perchrom_mutations [ 'strelka' ] = perchrom_mutations [ 'strelka_' + mut_type ] perchrom_mutations . pop ( 'strelka_' + mut_type ) # Read in each file to a dict vcf_lists = { caller : read_vcf ( vcf_file ) for caller , vcf_file in perchrom_mutations . items ( ) } all_positions = list ( set ( itertools . chain ( * vcf_lists . values ( ) ) ) ) for position in sorted ( all_positions ) : hits = { caller : position in vcf_lists [ caller ] for caller in perchrom_mutations . keys ( ) } if sum ( hits . values ( ) ) >= majority [ mut_type ] : callers = ',' . join ( [ caller for caller , hit in hits . 
items ( ) if hit ] ) assert position [ 1 ] not in accepted_hits [ position [ 0 ] ] accepted_hits [ position [ 0 ] ] [ position [ 1 ] ] = ( position [ 2 ] , position [ 3 ] , callers ) with open ( '' . join ( [ work_dir , '/' , chrom , '.vcf' ] ) , 'w' ) as outfile : print ( '##fileformat=VCFv4.0' , file = outfile ) print ( '##INFO=<ID=callers,Number=.,Type=String,Description=List of supporting callers.' , file = outfile ) print ( '#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO' , file = outfile ) for chrom in chrom_sorted ( accepted_hits . keys ( ) ) : for position in sorted ( accepted_hits [ chrom ] ) : print ( chrom , position , '.' , accepted_hits [ chrom ] [ position ] [ 0 ] , accepted_hits [ chrom ] [ position ] [ 1 ] , '.' , 'PASS' , 'callers=' + accepted_hits [ chrom ] [ position ] [ 2 ] , sep = '\t' , file = outfile ) fsid = job . fileStore . writeGlobalFile ( outfile . name ) export_results ( job , fsid , outfile . name , univ_options , subfolder = 'mutations/merged' ) return fsid
| 2,933
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/common.py#L74-L143
|
[
"def",
"_create_download_failed_message",
"(",
"exception",
",",
"url",
")",
":",
"message",
"=",
"'Failed to download from:\\n{}\\nwith {}:\\n{}'",
".",
"format",
"(",
"url",
",",
"exception",
".",
"__class__",
".",
"__name__",
",",
"exception",
")",
"if",
"_is_temporal_problem",
"(",
"exception",
")",
":",
"if",
"isinstance",
"(",
"exception",
",",
"requests",
".",
"ConnectionError",
")",
":",
"message",
"+=",
"'\\nPlease check your internet connection and try again.'",
"else",
":",
"message",
"+=",
"'\\nThere might be a problem in connection or the server failed to process '",
"'your request. Please try again.'",
"elif",
"isinstance",
"(",
"exception",
",",
"requests",
".",
"HTTPError",
")",
":",
"try",
":",
"server_message",
"=",
"''",
"for",
"elem",
"in",
"decode_data",
"(",
"exception",
".",
"response",
".",
"content",
",",
"MimeType",
".",
"XML",
")",
":",
"if",
"'ServiceException'",
"in",
"elem",
".",
"tag",
"or",
"'Message'",
"in",
"elem",
".",
"tag",
":",
"server_message",
"+=",
"elem",
".",
"text",
".",
"strip",
"(",
"'\\n\\t '",
")",
"except",
"ElementTree",
".",
"ParseError",
":",
"server_message",
"=",
"exception",
".",
"response",
".",
"text",
"message",
"+=",
"'\\nServer response: \"{}\"'",
".",
"format",
"(",
"server_message",
")",
"return",
"message"
] |
Read a vcf file to a dict of lists .
|
def read_vcf ( vcf_file ) : vcf_dict = [ ] with open ( vcf_file , 'r' ) as invcf : for line in invcf : if line . startswith ( '#' ) : continue line = line . strip ( ) . split ( ) vcf_dict . append ( ( line [ 0 ] , line [ 1 ] , line [ 3 ] , line [ 4 ] ) ) return vcf_dict
| 2,934
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/common.py#L146-L161
|
[
"def",
"set_acceptance",
"(",
"self",
",",
"channel",
"=",
"Channel",
".",
"CHANNEL_CH0",
",",
"AMR",
"=",
"AMR_ALL",
",",
"ACR",
"=",
"ACR_ALL",
")",
":",
"UcanSetAcceptanceEx",
"(",
"self",
".",
"_handle",
",",
"channel",
",",
"AMR",
",",
"ACR",
")"
] |
Merge per - chromosome vcf files into a single genome level vcf .
|
def merge_perchrom_vcfs(job, perchrom_vcfs, tool_name, univ_options):
    """
    Merge per-chromosome vcf files into a single genome-level vcf.

    :param job: Toil job handle (provides the file store) — assumed; TODO confirm against caller.
    :param dict perchrom_vcfs: Map of chromosome name -> file-store ID of that chromosome's vcf.
    :param str tool_name: Name of the calling tool; used in the export subfolder and log message.
    :param dict univ_options: Universal options for the pipeline run (passed to export_results).
    :return: File-store ID of the merged vcf.
    """
    work_dir = os.getcwd()
    # Stage every per-chromosome vcf locally under the name "<chrom>.vcf".
    input_files = {''.join([chrom, '.vcf']): jsid for chrom, jsid in perchrom_vcfs.items()}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=False)
    first = True
    with open(''.join([work_dir, '/', 'all_merged.vcf']), 'w') as outvcf:
        # Concatenate the per-chromosome files in the order given by chrom_sorted;
        # the '#' header block is emitted only once, from the first file seen.
        for chromvcfname in chrom_sorted([x.rstrip('.vcf') for x in input_files.keys()]):
            with open(input_files[chromvcfname + '.vcf'], 'r') as infile:
                for line in infile:
                    line = line.strip()
                    if line.startswith('#'):
                        if first:
                            print(line, file=outvcf)
                        continue
                    # A non-header line marks the end of the header section.
                    first = False
                    print(line, file=outvcf)
    output_file = job.fileStore.writeGlobalFile(outvcf.name)
    export_results(job, output_file, outvcf.name, univ_options,
                   subfolder='mutations/' + tool_name)
    job.fileStore.logToMaster('Ran merge_perchrom_vcfs for %s successfully' % tool_name)
    return output_file
| 2,935
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/common.py#L164-L192
|
[
"def",
"key_summary",
"(",
"keyjar",
",",
"issuer",
")",
":",
"try",
":",
"kbl",
"=",
"keyjar",
"[",
"issuer",
"]",
"except",
"KeyError",
":",
"return",
"''",
"else",
":",
"key_list",
"=",
"[",
"]",
"for",
"kb",
"in",
"kbl",
":",
"for",
"key",
"in",
"kb",
".",
"keys",
"(",
")",
":",
"if",
"key",
".",
"inactive_since",
":",
"key_list",
".",
"append",
"(",
"'*{}:{}:{}'",
".",
"format",
"(",
"key",
".",
"kty",
",",
"key",
".",
"use",
",",
"key",
".",
"kid",
")",
")",
"else",
":",
"key_list",
".",
"append",
"(",
"'{}:{}:{}'",
".",
"format",
"(",
"key",
".",
"kty",
",",
"key",
".",
"use",
",",
"key",
".",
"kid",
")",
")",
"return",
"', '",
".",
"join",
"(",
"key_list",
")"
] |
Un - merge a vcf file into per - chromosome vcfs .
|
def unmerge(job, input_vcf, tool_name, chromosomes, tool_options, univ_options):
    """
    Un-merge a vcf file into per-chromosome vcfs.

    :param job: Toil job handle (provides the file store) — assumed; TODO confirm against caller.
    :param input_vcf: File-store ID of the genome-level vcf to split.
    :param str tool_name: Name of the calling tool; used in the export subfolder.
    :param chromosomes: Collection of chromosome names that must each get an output vcf
           (chromosomes with no mutations get a header-only file).
    :param dict tool_options: Tool options; 'genome_fai' must be a tarred genome fasta index.
    :param dict univ_options: Universal options for the pipeline run (passed to export_results).
    :return: Dict of chromosome name -> file-store ID of that chromosome's vcf.
    """
    work_dir = os.getcwd()
    input_files = {
        'input.vcf': input_vcf,
        'genome.fa.fai.tar.gz': tool_options['genome_fai']}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=False)
    input_files['genome.fa.fai'] = untargz(input_files['genome.fa.fai.tar.gz'], work_dir)
    # Map of chromosome name -> open file handle for that chromosome's output vcf.
    read_chromosomes = defaultdict()
    with open(input_files['input.vcf'], 'r') as in_vcf:
        header = []
        for line in in_vcf:
            if line.startswith('#'):
                # Accumulate the header so it can be replayed into every output file.
                header.append(line)
                continue
            line = line.strip()
            chrom = line.split()[0]
            if chrom in read_chromosomes:
                print(line, file=read_chromosomes[chrom])
            else:
                # First record for this chromosome: open its file and write the header.
                read_chromosomes[chrom] = open(os.path.join(os.getcwd(), chrom + '.vcf'), 'w')
                print(''.join(header), file=read_chromosomes[chrom], end='')
                print(line, file=read_chromosomes[chrom])
    # Process chromosomes that had no mutations
    for chrom in set(chromosomes).difference(set(read_chromosomes.keys())):
        read_chromosomes[chrom] = open(os.path.join(os.getcwd(), chrom + '.vcf'), 'w')
        print(''.join(header), file=read_chromosomes[chrom], end='')
    outdict = {}
    # Only chromosomes that were both requested and produced are exported.
    chroms = set(chromosomes).intersection(set(read_chromosomes.keys()))
    for chrom, chromvcf in read_chromosomes.items():
        chromvcf.close()
        if chrom not in chroms:
            continue
        outdict[chrom] = job.fileStore.writeGlobalFile(chromvcf.name)
        export_results(job, outdict[chrom], chromvcf.name, univ_options,
                       subfolder='mutations/' + tool_name)
    return outdict
| 2,936
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/mutation_calling/common.py#L195-L243
|
[
"def",
"_calculate_session_expiry",
"(",
"self",
",",
"request",
",",
"user_info",
")",
":",
"access_token_expiry_timestamp",
"=",
"self",
".",
"_get_access_token_expiry",
"(",
"request",
")",
"id_token_expiry_timestamp",
"=",
"self",
".",
"_get_id_token_expiry",
"(",
"user_info",
")",
"now_in_seconds",
"=",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
"# The session length is set to match whichever token expiration time is closer.",
"earliest_expiration_timestamp",
"=",
"min",
"(",
"access_token_expiry_timestamp",
",",
"id_token_expiry_timestamp",
")",
"seconds_until_expiry",
"=",
"earliest_expiration_timestamp",
"-",
"now_in_seconds",
"if",
"seconds_until_expiry",
"<=",
"0",
":",
"raise",
"AuthError",
"(",
"'Session expiry time has already passed!'",
")",
"return",
"seconds_until_expiry"
] |
Returns a Feature or FeatureCollection .
|
def as_feature(data):
    """Coerce ``data`` into a Feature or FeatureCollection.

    Already-wrapped inputs are returned unchanged; otherwise the input is
    inspected (feature-like mapping, feature list, sequence, layered dict,
    geometry, empty mapping — in that order) and wrapped accordingly. Inputs
    matching none of these are returned as-is.
    """
    if isinstance(data, (Feature, FeatureCollection)):
        return data
    if is_featurelike(data):
        return Feature(**data)
    if has_features(data):
        return FeatureCollection(**data)
    if isinstance(data, collections.Sequence):
        return FeatureCollection(features=data)
    if has_layer(data):
        return LayerCollection(data)
    if has_coordinates(data):
        return Feature(geometry=data)
    if isinstance(data, collections.Mapping) and not data:
        return Feature()
    return data
| 2,937
|
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/collections.py#L9-L28
|
[
"def",
"parse_from_file",
"(",
"filename",
",",
"nodata",
"=",
"False",
")",
":",
"header",
"=",
"None",
"with",
"open",
"(",
"filename",
",",
"\"rb\"",
")",
"as",
"file",
":",
"header",
"=",
"read_machine_header",
"(",
"file",
")",
"meta_raw",
"=",
"file",
".",
"read",
"(",
"header",
"[",
"'meta_len'",
"]",
")",
"meta",
"=",
"__parse_meta",
"(",
"meta_raw",
",",
"header",
")",
"data",
"=",
"b''",
"if",
"not",
"nodata",
":",
"data",
"=",
"__decompress",
"(",
"meta",
",",
"file",
".",
"read",
"(",
"header",
"[",
"'data_len'",
"]",
")",
")",
"return",
"header",
",",
"meta",
",",
"data"
] |
Returns true for a multi - layer dict of FeatureCollections .
|
def has_layer(fcollection):
    """Returns true for a multi-layer dict of FeatureCollections."""
    return any(has_features(value) for value in six.viewvalues(fcollection))
| 2,938
|
https://github.com/bkg/django-spillway/blob/c488a62642430b005f1e0d4a19e160d8d5964b67/spillway/collections.py#L51-L56
|
[
"def",
"remove_stale_javascripts",
"(",
"portal",
")",
":",
"logger",
".",
"info",
"(",
"\"Removing stale javascripts ...\"",
")",
"for",
"js",
"in",
"JAVASCRIPTS_TO_REMOVE",
":",
"logger",
".",
"info",
"(",
"\"Unregistering JS %s\"",
"%",
"js",
")",
"portal",
".",
"portal_javascripts",
".",
"unregisterResource",
"(",
"js",
")"
] |
A wrapper for run_rsem using the results from run_star as input .
|
def wrap_rsem(job, star_bams, univ_options, rsem_options):
    """
    A wrapper for run_rsem using the results from run_star as input.

    Schedules run_rsem as a child Toil job on the transcriptome-aligned bam,
    with cores taken from the rsem options and disk estimated lazily (via a
    PromisedRequirement) from the star outputs and the rsem index.

    :param job: Toil job handle — assumed; TODO confirm against caller.
    :param dict star_bams: STAR outputs; must contain 'rna_transcriptome.bam'.
    :param dict univ_options: Universal options for the pipeline run.
    :param dict rsem_options: RSEM options; 'n' (cores) and 'index' are read here.
    :return: Promise (rv) of the scheduled run_rsem job's return value.
    """
    rsem = job.addChildJobFn(run_rsem, star_bams['rna_transcriptome.bam'], univ_options,
                             rsem_options,
                             cores=rsem_options['n'],
                             disk=PromisedRequirement(rsem_disk, star_bams,
                                                      rsem_options['index']))
    return rsem.rv()
| 2,939
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/expression_profiling/rsem.py#L36-L54
|
[
"def",
"initialize_communities_bucket",
"(",
")",
":",
"bucket_id",
"=",
"UUID",
"(",
"current_app",
".",
"config",
"[",
"'COMMUNITIES_BUCKET_UUID'",
"]",
")",
"if",
"Bucket",
".",
"query",
".",
"get",
"(",
"bucket_id",
")",
":",
"raise",
"FilesException",
"(",
"\"Bucket with UUID {} already exists.\"",
".",
"format",
"(",
"bucket_id",
")",
")",
"else",
":",
"storage_class",
"=",
"current_app",
".",
"config",
"[",
"'FILES_REST_DEFAULT_STORAGE_CLASS'",
"]",
"location",
"=",
"Location",
".",
"get_default",
"(",
")",
"bucket",
"=",
"Bucket",
"(",
"id",
"=",
"bucket_id",
",",
"location",
"=",
"location",
",",
"default_storage_class",
"=",
"storage_class",
")",
"db",
".",
"session",
".",
"add",
"(",
"bucket",
")",
"db",
".",
"session",
".",
"commit",
"(",
")"
] |
Run rsem on the input RNA bam .
|
def run_rsem(job, rna_bam, univ_options, rsem_options):
    """
    Run rsem on the input RNA bam.

    :param job: Toil job handle (provides the file store) — assumed; TODO confirm against caller.
    :param rna_bam: File-store ID of the transcriptome-aligned RNA bam.
    :param dict univ_options: Universal options; 'ref', 'dockerhub' and 'patient' are read here.
    :param dict rsem_options: RSEM options; 'index', 'n' and 'version' are read here.
    :return: Dict mapping 'rsem.genes.results' and 'rsem.isoforms.results' to their
             file-store IDs.
    """
    work_dir = os.getcwd()
    input_files = {
        'star_transcriptome.bam': rna_bam,
        'rsem_index.tar.gz': rsem_options['index']}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=False)
    # The index ships as a tarball; unpack it next to the inputs.
    input_files['rsem_index'] = untargz(input_files['rsem_index.tar.gz'], work_dir)
    # Translate local paths into their in-container equivalents for docker_call.
    input_files = {key: docker_path(path) for key, path in input_files.items()}
    parameters = ['--paired-end',
                  '-p', str(rsem_options['n']),
                  '--bam',
                  input_files['star_transcriptome.bam'],
                  '--no-bam-output',
                  '/'.join([input_files['rsem_index'], univ_options['ref']]),
                  'rsem']
    docker_call(tool='rsem', tool_parameters=parameters, work_dir=work_dir,
                dockerhub=univ_options['dockerhub'], tool_version=rsem_options['version'])
    output_files = {}
    for filename in ('rsem.genes.results', 'rsem.isoforms.results'):
        output_files[filename] = job.fileStore.writeGlobalFile('/'.join([work_dir, filename]))
        export_results(job, output_files[filename], '/'.join([work_dir, filename]), univ_options,
                       subfolder='expression')
    job.fileStore.logToMaster('Ran rsem on %s successfully' % univ_options['patient'])
    return output_files
| 2,940
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/expression_profiling/rsem.py#L57-L95
|
[
"def",
"new_cast_status",
"(",
"self",
",",
"cast_status",
")",
":",
"new_channel",
"=",
"self",
".",
"destination_id",
"!=",
"cast_status",
".",
"transport_id",
"if",
"new_channel",
":",
"self",
".",
"disconnect_channel",
"(",
"self",
".",
"destination_id",
")",
"self",
".",
"app_namespaces",
"=",
"cast_status",
".",
"namespaces",
"self",
".",
"destination_id",
"=",
"cast_status",
".",
"transport_id",
"self",
".",
"session_id",
"=",
"cast_status",
".",
"session_id",
"if",
"new_channel",
":",
"# If any of the namespaces of the new app are supported",
"# we will automatically connect to it to receive updates",
"for",
"namespace",
"in",
"self",
".",
"app_namespaces",
":",
"if",
"namespace",
"in",
"self",
".",
"_handlers",
":",
"self",
".",
"_ensure_channel_connected",
"(",
"self",
".",
"destination_id",
")",
"self",
".",
"_handlers",
"[",
"namespace",
"]",
".",
"channel_connected",
"(",
")"
] |
Activates overlay with browser optionally opened at a given page .
|
def activate(self, page=None):
    """Activates overlay with browser optionally opened at a given page.

    When ``page`` contains a URL scheme separator ('://') the overlay browser
    is opened at that URL; otherwise the value is treated as a game overlay
    page name (empty string when ``page`` is omitted or falsy).
    """
    target = page or ''
    if '://' in target:
        self._iface.activate_overlay_url(target)
    else:
        self._iface.activate_overlay_game(target)
| 2,941
|
https://github.com/idlesign/steampak/blob/cb3f2c737e272b0360802d947e388df7e34f50f3/steampak/libsteam/resources/overlay.py#L30-L43
|
[
"def",
"clean_for_storage",
"(",
"self",
",",
"data",
")",
":",
"data",
"=",
"self",
".",
"data_to_unicode",
"(",
"data",
")",
"if",
"isinstance",
"(",
"data",
",",
"dict",
")",
":",
"for",
"k",
"in",
"dict",
"(",
"data",
")",
".",
"keys",
"(",
")",
":",
"if",
"k",
"==",
"'_id'",
":",
"del",
"data",
"[",
"k",
"]",
"continue",
"if",
"'.'",
"in",
"k",
":",
"new_k",
"=",
"k",
".",
"replace",
"(",
"'.'",
",",
"'_'",
")",
"data",
"[",
"new_k",
"]",
"=",
"data",
"[",
"k",
"]",
"del",
"data",
"[",
"k",
"]",
"k",
"=",
"new_k",
"if",
"isinstance",
"(",
"data",
"[",
"k",
"]",
",",
"dict",
")",
":",
"data",
"[",
"k",
"]",
"=",
"self",
".",
"clean_for_storage",
"(",
"data",
"[",
"k",
"]",
")",
"elif",
"isinstance",
"(",
"data",
"[",
"k",
"]",
",",
"list",
")",
":",
"data",
"[",
"k",
"]",
"=",
"[",
"self",
".",
"clean_for_storage",
"(",
"item",
")",
"for",
"item",
"in",
"data",
"[",
"k",
"]",
"]",
"return",
"data"
] |
At least one of the items in value should match
|
def any_of(value, *args):
    """At least one of the items in value should match."""
    combined = (value,) + args if args else value
    return ExpectationAny(combined)
| 2,942
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/dsl.py#L33-L39
|
[
"def",
"construct_error_message",
"(",
"driver_id",
",",
"error_type",
",",
"message",
",",
"timestamp",
")",
":",
"builder",
"=",
"flatbuffers",
".",
"Builder",
"(",
"0",
")",
"driver_offset",
"=",
"builder",
".",
"CreateString",
"(",
"driver_id",
".",
"binary",
"(",
")",
")",
"error_type_offset",
"=",
"builder",
".",
"CreateString",
"(",
"error_type",
")",
"message_offset",
"=",
"builder",
".",
"CreateString",
"(",
"message",
")",
"ray",
".",
"core",
".",
"generated",
".",
"ErrorTableData",
".",
"ErrorTableDataStart",
"(",
"builder",
")",
"ray",
".",
"core",
".",
"generated",
".",
"ErrorTableData",
".",
"ErrorTableDataAddDriverId",
"(",
"builder",
",",
"driver_offset",
")",
"ray",
".",
"core",
".",
"generated",
".",
"ErrorTableData",
".",
"ErrorTableDataAddType",
"(",
"builder",
",",
"error_type_offset",
")",
"ray",
".",
"core",
".",
"generated",
".",
"ErrorTableData",
".",
"ErrorTableDataAddErrorMessage",
"(",
"builder",
",",
"message_offset",
")",
"ray",
".",
"core",
".",
"generated",
".",
"ErrorTableData",
".",
"ErrorTableDataAddTimestamp",
"(",
"builder",
",",
"timestamp",
")",
"error_data_offset",
"=",
"ray",
".",
"core",
".",
"generated",
".",
"ErrorTableData",
".",
"ErrorTableDataEnd",
"(",
"builder",
")",
"builder",
".",
"Finish",
"(",
"error_data_offset",
")",
"return",
"bytes",
"(",
"builder",
".",
"Output",
"(",
")",
")"
] |
All the items in value should match
|
def all_of(value, *args):
    """All the items in value should match."""
    combined = (value,) + args if args else value
    return ExpectationAll(combined)
| 2,943
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/dsl.py#L42-L48
|
[
"def",
"tofile",
"(",
"self",
",",
"filename",
",",
"format",
"=",
"'ascii'",
")",
":",
"if",
"not",
"common",
".",
"is_string",
"(",
"filename",
")",
":",
"raise",
"TypeError",
"(",
"'argument filename must be string but got %s'",
"%",
"(",
"type",
"(",
"filename",
")",
")",
")",
"if",
"format",
"not",
"in",
"[",
"'ascii'",
",",
"'binary'",
"]",
":",
"raise",
"TypeError",
"(",
"'argument format must be ascii | binary'",
")",
"filename",
"=",
"filename",
".",
"strip",
"(",
")",
"if",
"not",
"filename",
":",
"raise",
"ValueError",
"(",
"'filename must be non-empty string'",
")",
"if",
"filename",
"[",
"-",
"4",
":",
"]",
"!=",
"'.vtk'",
":",
"filename",
"+=",
"'.vtk'",
"f",
"=",
"open",
"(",
"filename",
",",
"'wb'",
")",
"f",
".",
"write",
"(",
"self",
".",
"to_string",
"(",
"format",
")",
")",
"f",
".",
"close",
"(",
")"
] |
None of the items in value should match
|
def none_of(value, *args):
    """None of the items in value should match."""
    combined = (value,) + args if args else value
    return ExpectationNone(combined)
| 2,944
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/dsl.py#L51-L57
|
[
"def",
"tofile",
"(",
"self",
",",
"filename",
",",
"format",
"=",
"'ascii'",
")",
":",
"if",
"not",
"common",
".",
"is_string",
"(",
"filename",
")",
":",
"raise",
"TypeError",
"(",
"'argument filename must be string but got %s'",
"%",
"(",
"type",
"(",
"filename",
")",
")",
")",
"if",
"format",
"not",
"in",
"[",
"'ascii'",
",",
"'binary'",
"]",
":",
"raise",
"TypeError",
"(",
"'argument format must be ascii | binary'",
")",
"filename",
"=",
"filename",
".",
"strip",
"(",
")",
"if",
"not",
"filename",
":",
"raise",
"ValueError",
"(",
"'filename must be non-empty string'",
")",
"if",
"filename",
"[",
"-",
"4",
":",
"]",
"!=",
"'.vtk'",
":",
"filename",
"+=",
"'.vtk'",
"f",
"=",
"open",
"(",
"filename",
",",
"'wb'",
")",
"f",
".",
"write",
"(",
"self",
".",
"to_string",
"(",
"format",
")",
")",
"f",
".",
"close",
"(",
")"
] |
Runs cutadapt on the input RNA fastq files .
|
def run_cutadapt(job, fastqs, univ_options, cutadapt_options):
    """
    Runs cutadapt on the input RNA fastq files.

    :param job: Toil job handle (provides the file store) — assumed; TODO confirm against caller.
    :param fastqs: Pair of file-store IDs for the forward and reverse RNA fastqs.
    :param dict univ_options: Universal options; 'dockerhub' and 'patient' are read here.
    :param dict cutadapt_options: Cutadapt options; 'a', 'A' (3' adapters) and 'version'
           are read here.
    :return: List of file-store IDs for the two trimmed, gzipped fastqs [R1, R2].
    """
    work_dir = os.getcwd()
    input_files = {
        'rna_1.fastq': fastqs[0],
        'rna_2.fastq': fastqs[1]}
    input_files = get_files_from_filestore(job, input_files, work_dir, docker=False)
    # Handle gzipped file
    gz = '.gz' if is_gzipfile(input_files['rna_1.fastq']) else ''
    if gz:
        # Expose the gzipped inputs under a '.gz'-suffixed name via symlinks so
        # cutadapt detects the compression from the extension.
        for read_file in 'rna_1.fastq', 'rna_2.fastq':
            os.symlink(read_file, read_file + gz)
            input_files[read_file + gz] = input_files[read_file] + gz
    # Translate local paths into their in-container equivalents for docker_call.
    input_files = {key: docker_path(path) for key, path in input_files.items()}
    parameters = ['-a', cutadapt_options['a'],  # Fwd read 3' adapter
                  '-A', cutadapt_options['A'],  # Rev read 3' adapter
                  '-m', '35',  # Minimum size of read
                  '-o', docker_path('rna_cutadapt_1.fastq.gz'),  # Output for R1
                  '-p', docker_path('rna_cutadapt_2.fastq.gz'),  # Output for R2
                  input_files['rna_1.fastq' + gz],
                  input_files['rna_2.fastq' + gz]]
    docker_call(tool='cutadapt', tool_parameters=parameters, work_dir=work_dir,
                dockerhub=univ_options['dockerhub'], tool_version=cutadapt_options['version'])
    output_files = []
    for fastq_file in ['rna_cutadapt_1.fastq.gz', 'rna_cutadapt_2.fastq.gz']:
        output_files.append(job.fileStore.writeGlobalFile('/'.join([work_dir, fastq_file])))
    job.fileStore.logToMaster('Ran cutadapt on %s successfully' % univ_options['patient'])
    return output_files
| 2,945
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/qc/rna.py#L29-L64
|
[
"def",
"list_blobs",
"(",
"call",
"=",
"None",
",",
"kwargs",
"=",
"None",
")",
":",
"# pylint: disable=unused-argument",
"if",
"kwargs",
"is",
"None",
":",
"kwargs",
"=",
"{",
"}",
"if",
"'container'",
"not",
"in",
"kwargs",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'A container must be specified'",
")",
"storageservice",
"=",
"_get_block_blob_service",
"(",
"kwargs",
")",
"ret",
"=",
"{",
"}",
"try",
":",
"for",
"blob",
"in",
"storageservice",
".",
"list_blobs",
"(",
"kwargs",
"[",
"'container'",
"]",
")",
".",
"items",
":",
"ret",
"[",
"blob",
".",
"name",
"]",
"=",
"{",
"'blob_type'",
":",
"blob",
".",
"properties",
".",
"blob_type",
",",
"'last_modified'",
":",
"blob",
".",
"properties",
".",
"last_modified",
".",
"isoformat",
"(",
")",
",",
"'server_encrypted'",
":",
"blob",
".",
"properties",
".",
"server_encrypted",
",",
"}",
"except",
"Exception",
"as",
"exc",
":",
"log",
".",
"warning",
"(",
"six",
".",
"text_type",
"(",
"exc",
")",
")",
"return",
"ret"
] |
Basic test view .
|
def index():
    """Basic test view.

    Renders the open template for anonymous users and the limited template for
    authenticated ones, passing each registered action's permission verdict for
    the current identity.
    """
    identity = g.identity
    actions = {action.value: DynamicPermission(action).allows(identity)
               for action in access.actions.values()}
    if current_user.is_anonymous:
        return render_template("invenio_access/open.html",
                               actions=actions,
                               identity=identity)
    return render_template("invenio_access/limited.html",
                           message='',
                           actions=actions,
                           identity=identity)
| 2,946
|
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/examples/app.py#L109-L124
|
[
"def",
"indication",
"(",
"self",
",",
"apdu",
")",
":",
"if",
"_debug",
":",
"ServerSSM",
".",
"_debug",
"(",
"\"indication %r\"",
",",
"apdu",
")",
"if",
"self",
".",
"state",
"==",
"IDLE",
":",
"self",
".",
"idle",
"(",
"apdu",
")",
"elif",
"self",
".",
"state",
"==",
"SEGMENTED_REQUEST",
":",
"self",
".",
"segmented_request",
"(",
"apdu",
")",
"elif",
"self",
".",
"state",
"==",
"AWAIT_RESPONSE",
":",
"self",
".",
"await_response",
"(",
"apdu",
")",
"elif",
"self",
".",
"state",
"==",
"SEGMENTED_RESPONSE",
":",
"self",
".",
"segmented_response",
"(",
"apdu",
")",
"else",
":",
"if",
"_debug",
":",
"ServerSSM",
".",
"_debug",
"(",
"\" - invalid state\"",
")"
] |
View only allowed to admin role .
|
def role_admin():
    """View only allowed to admin role.

    Renders the limited template with an explanatory message plus each
    registered action's permission verdict for the current identity.
    """
    identity = g.identity
    actions = {action.value: DynamicPermission(action).allows(identity)
               for action in access.actions.values()}
    message = 'You are opening a page requiring the "admin-access" permission'
    return render_template("invenio_access/limited.html",
                           message=message,
                           actions=actions,
                           identity=identity)
| 2,947
|
https://github.com/inveniosoftware/invenio-access/blob/3b033a4bdc110eb2f7e9f08f0744a780884bfc80/examples/app.py#L134-L145
|
[
"def",
"cudnnSetPooling2dDescriptor",
"(",
"poolingDesc",
",",
"mode",
",",
"windowHeight",
",",
"windowWidth",
",",
"verticalPadding",
",",
"horizontalPadding",
",",
"verticalStride",
",",
"horizontalStride",
")",
":",
"status",
"=",
"_libcudnn",
".",
"cudnnSetPooling2dDescriptor",
"(",
"poolingDesc",
",",
"mode",
",",
"windowHeight",
",",
"windowWidth",
",",
"verticalPadding",
",",
"horizontalPadding",
",",
"verticalStride",
",",
"horizontalStride",
")",
"cudnnCheckStatus",
"(",
"status",
")"
] |
Read the tumor and normal fastas into a joint dict .
|
def read_fastas(input_files):
    """Read the tumor and normal fastas into a joint dict.

    The tumor file is the one whose key starts with 'T', the normal file the
    one starting with 'N'. Both are folded into a single mapping of read name
    -> list of sequence lines; the normal file must not introduce any read
    name the tumor file did not already contain.
    """
    tumor_file = next(path for name, path in input_files.items() if name.startswith('T'))
    normal_file = next(path for name, path in input_files.items() if name.startswith('N'))
    combined = defaultdict(list)
    combined = _read_fasta(tumor_file, combined)
    tumor_entry_count = len(combined)
    combined = _read_fasta(normal_file, combined)
    # The normal fasta may only add sequences to existing entries, never new keys.
    assert len(combined) == tumor_entry_count
    return combined
| 2,948
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/binding_prediction/common.py#L140-L155
|
[
"def",
"remove_server",
"(",
"self",
",",
"server_id",
")",
":",
"# Validate server_id",
"server",
"=",
"self",
".",
"_get_server",
"(",
"server_id",
")",
"# Delete any instances we recorded to be cleaned up",
"if",
"server_id",
"in",
"self",
".",
"_owned_subscriptions",
":",
"inst_list",
"=",
"self",
".",
"_owned_subscriptions",
"[",
"server_id",
"]",
"# We iterate backwards because we change the list",
"for",
"i",
"in",
"six",
".",
"moves",
".",
"range",
"(",
"len",
"(",
"inst_list",
")",
"-",
"1",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"inst",
"=",
"inst_list",
"[",
"i",
"]",
"server",
".",
"conn",
".",
"DeleteInstance",
"(",
"inst",
".",
"path",
")",
"del",
"inst_list",
"[",
"i",
"]",
"del",
"self",
".",
"_owned_subscriptions",
"[",
"server_id",
"]",
"if",
"server_id",
"in",
"self",
".",
"_owned_filters",
":",
"inst_list",
"=",
"self",
".",
"_owned_filters",
"[",
"server_id",
"]",
"# We iterate backwards because we change the list",
"for",
"i",
"in",
"six",
".",
"moves",
".",
"range",
"(",
"len",
"(",
"inst_list",
")",
"-",
"1",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"inst",
"=",
"inst_list",
"[",
"i",
"]",
"server",
".",
"conn",
".",
"DeleteInstance",
"(",
"inst",
".",
"path",
")",
"del",
"inst_list",
"[",
"i",
"]",
"del",
"self",
".",
"_owned_filters",
"[",
"server_id",
"]",
"if",
"server_id",
"in",
"self",
".",
"_owned_destinations",
":",
"inst_list",
"=",
"self",
".",
"_owned_destinations",
"[",
"server_id",
"]",
"# We iterate backwards because we change the list",
"for",
"i",
"in",
"six",
".",
"moves",
".",
"range",
"(",
"len",
"(",
"inst_list",
")",
"-",
"1",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"inst",
"=",
"inst_list",
"[",
"i",
"]",
"server",
".",
"conn",
".",
"DeleteInstance",
"(",
"inst",
".",
"path",
")",
"del",
"inst_list",
"[",
"i",
"]",
"del",
"self",
".",
"_owned_destinations",
"[",
"server_id",
"]",
"# Remove server from this listener",
"del",
"self",
".",
"_servers",
"[",
"server_id",
"]"
] |
Read the peptide fasta into an existing dict .
|
def _read_fasta ( fasta_file , output_dict ) : read_name = None with open ( fasta_file , 'r' ) as f : for line in f : line = line . strip ( ) if not line : continue if line . startswith ( '>' ) : read_name = line . lstrip ( '>' ) else : assert read_name is not None , line output_dict [ read_name ] . append ( line . strip ( ) ) return output_dict
| 2,949
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/binding_prediction/common.py#L158-L178
|
[
"def",
"bucket",
"(",
"self",
",",
"experiment",
",",
"user_id",
",",
"bucketing_id",
")",
":",
"if",
"not",
"experiment",
":",
"return",
"None",
"# Determine if experiment is in a mutually exclusive group",
"if",
"experiment",
".",
"groupPolicy",
"in",
"GROUP_POLICIES",
":",
"group",
"=",
"self",
".",
"config",
".",
"get_group",
"(",
"experiment",
".",
"groupId",
")",
"if",
"not",
"group",
":",
"return",
"None",
"user_experiment_id",
"=",
"self",
".",
"find_bucket",
"(",
"bucketing_id",
",",
"experiment",
".",
"groupId",
",",
"group",
".",
"trafficAllocation",
")",
"if",
"not",
"user_experiment_id",
":",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is in no experiment.'",
"%",
"user_id",
")",
"return",
"None",
"if",
"user_experiment_id",
"!=",
"experiment",
".",
"id",
":",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is not in experiment \"%s\" of group %s.'",
"%",
"(",
"user_id",
",",
"experiment",
".",
"key",
",",
"experiment",
".",
"groupId",
")",
")",
"return",
"None",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is in experiment %s of group %s.'",
"%",
"(",
"user_id",
",",
"experiment",
".",
"key",
",",
"experiment",
".",
"groupId",
")",
")",
"# Bucket user if not in white-list and in group (if any)",
"variation_id",
"=",
"self",
".",
"find_bucket",
"(",
"bucketing_id",
",",
"experiment",
".",
"id",
",",
"experiment",
".",
"trafficAllocation",
")",
"if",
"variation_id",
":",
"variation",
"=",
"self",
".",
"config",
".",
"get_variation_from_id",
"(",
"experiment",
".",
"key",
",",
"variation_id",
")",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is in variation \"%s\" of experiment %s.'",
"%",
"(",
"user_id",
",",
"variation",
".",
"key",
",",
"experiment",
".",
"key",
")",
")",
"return",
"variation",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is in no variation.'",
"%",
"user_id",
")",
"return",
"None"
] |
Process the results from running IEDB MHCII binding predictions using the consensus method into a pandas dataframe .
|
def _process_consensus_mhcii ( mhc_file , normal = False ) : core_col = None # Variable to hold the column number with the core results = pandas . DataFrame ( columns = [ 'allele' , 'pept' , 'tumor_pred' , 'core' ] ) with open ( mhc_file , 'r' ) as mf : peptides = set ( ) for line in mf : # Skip header lines if not line . startswith ( 'HLA' ) : continue line = line . strip ( ) . split ( '\t' ) allele = line [ 0 ] pept = line [ 4 ] pred = line [ 6 ] if core_col : core = line [ core_col ] else : methods = line [ 5 ] . lstrip ( 'Consensus(' ) . rstrip ( ')' ) methods = methods . split ( ',' ) if 'NN' in methods : core_col = 13 elif 'netMHCIIpan' in methods : core_col = 17 elif 'Sturniolo' in methods : core_col = 19 elif 'SMM' in methods : core_col = 10 core = line [ core_col ] if core_col else 'NOCORE' if float ( pred ) > 5.00 and not normal : continue results . loc [ len ( results ) ] = [ allele , pept , pred , core ] results . drop_duplicates ( inplace = True ) return results
| 2,950
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/binding_prediction/common.py#L181-L221
|
[
"def",
"exclude_types",
"(",
"self",
",",
"*",
"objs",
")",
":",
"for",
"o",
"in",
"objs",
":",
"for",
"t",
"in",
"_keytuple",
"(",
"o",
")",
":",
"if",
"t",
"and",
"t",
"not",
"in",
"self",
".",
"_excl_d",
":",
"self",
".",
"_excl_d",
"[",
"t",
"]",
"=",
"0"
] |
Process the results from running NetMHCIIpan binding predictions into a pandas dataframe .
|
def _process_net_mhcii ( mhc_file , normal = False ) : results = pandas . DataFrame ( columns = [ 'allele' , 'pept' , 'tumor_pred' , 'core' , 'peptide_name' ] ) with open ( mhc_file , 'r' ) as mf : peptides = set ( ) # Get the allele from the first line and skip the second line allele = re . sub ( '-DQB' , '/DQB' , mf . readline ( ) . strip ( ) ) _ = mf . readline ( ) for line in mf : line = line . strip ( ) . split ( '\t' ) pept = line [ 1 ] pred = line [ 5 ] core = 'NOCORE' peptide_name = line [ 2 ] if float ( pred ) > 5.00 and not normal : continue results . loc [ len ( results ) ] = [ allele , pept , pred , core , peptide_name ] results . drop_duplicates ( inplace = True ) return results
| 2,951
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/binding_prediction/common.py#L253-L278
|
[
"def",
"teardown",
"(",
"self",
")",
":",
"self",
".",
"ensure_all_devices_in_sync",
"(",
")",
"for",
"device",
"in",
"self",
".",
"devices",
":",
"self",
".",
"_delete_device_from_device_group",
"(",
"device",
")",
"self",
".",
"_sync_to_group",
"(",
"device",
")",
"pollster",
"(",
"self",
".",
"_ensure_device_active",
")",
"(",
"device",
")",
"self",
".",
"ensure_all_devices_in_sync",
"(",
")",
"dg",
"=",
"pollster",
"(",
"self",
".",
"_get_device_group",
")",
"(",
"self",
".",
"devices",
"[",
"0",
"]",
")",
"dg",
".",
"delete",
"(",
")",
"pollster",
"(",
"self",
".",
"_check_devices_active_licensed",
")",
"(",
")",
"pollster",
"(",
"self",
".",
"_check_all_devices_in_sync",
")",
"(",
")"
] |
Process the results from running IEDB MHCI binding predictions into a pandas dataframe .
|
def _process_mhci ( mhc_file , normal = False ) : results = pandas . DataFrame ( columns = [ 'allele' , 'pept' , 'tumor_pred' , 'core' ] ) with open ( mhc_file , 'r' ) as mf : peptides = set ( ) for line in mf : # Skip header lines if not line . startswith ( 'HLA' ) : continue line = line . strip ( ) . split ( '\t' ) allele = line [ 0 ] pept = line [ 5 ] pred = line [ 7 ] if float ( pred ) > 5.00 and not normal : continue results . loc [ len ( results ) ] = [ allele , pept , pred , pept ] results . drop_duplicates ( inplace = True ) return results
| 2,952
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/binding_prediction/common.py#L281-L305
|
[
"def",
"saturation",
"(",
"self",
",",
"value",
")",
":",
"value",
"=",
"clean_float",
"(",
"value",
")",
"if",
"value",
"is",
"None",
":",
"return",
"try",
":",
"unit_moisture_weight",
"=",
"self",
".",
"unit_moist_weight",
"-",
"self",
".",
"unit_dry_weight",
"unit_moisture_volume",
"=",
"unit_moisture_weight",
"/",
"self",
".",
"_pw",
"saturation",
"=",
"unit_moisture_volume",
"/",
"self",
".",
"_calc_unit_void_volume",
"(",
")",
"if",
"saturation",
"is",
"not",
"None",
"and",
"not",
"ct",
".",
"isclose",
"(",
"saturation",
",",
"value",
",",
"rel_tol",
"=",
"self",
".",
"_tolerance",
")",
":",
"raise",
"ModelError",
"(",
"\"New saturation (%.3f) is inconsistent \"",
"\"with calculated value (%.3f)\"",
"%",
"(",
"value",
",",
"saturation",
")",
")",
"except",
"TypeError",
":",
"pass",
"old_value",
"=",
"self",
".",
"saturation",
"self",
".",
"_saturation",
"=",
"value",
"try",
":",
"self",
".",
"recompute_all_weights_and_void",
"(",
")",
"self",
".",
"_add_to_stack",
"(",
"\"saturation\"",
",",
"value",
")",
"except",
"ModelError",
"as",
"e",
":",
"self",
".",
"_saturation",
"=",
"old_value",
"raise",
"ModelError",
"(",
"e",
")"
] |
Return the number of differences between 2 peptides
|
def pept_diff(p1, p2):
    """Return the number of positional differences between two peptides.

    Peptides of unequal length are not comparable and yield -1.
    """
    if len(p1) != len(p2):
        return -1
    return sum(a != b for a, b in zip(p1, p2))
| 2,953
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/binding_prediction/common.py#L308-L329
|
[
"def",
"restore",
"(",
"self",
",",
"state",
")",
":",
"storage_data",
"=",
"state",
".",
"get",
"(",
"u'storage_data'",
",",
"[",
"]",
")",
"streaming_data",
"=",
"state",
".",
"get",
"(",
"u'streaming_data'",
",",
"[",
"]",
")",
"if",
"len",
"(",
"storage_data",
")",
">",
"self",
".",
"storage_length",
"or",
"len",
"(",
"streaming_data",
")",
">",
"self",
".",
"streaming_length",
":",
"raise",
"ArgumentError",
"(",
"\"Cannot restore InMemoryStorageEngine, too many readings\"",
",",
"storage_size",
"=",
"len",
"(",
"storage_data",
")",
",",
"storage_max",
"=",
"self",
".",
"storage_length",
",",
"streaming_size",
"=",
"len",
"(",
"streaming_data",
")",
",",
"streaming_max",
"=",
"self",
".",
"streaming_length",
")",
"self",
".",
"storage_data",
"=",
"[",
"IOTileReading",
".",
"FromDict",
"(",
"x",
")",
"for",
"x",
"in",
"storage_data",
"]",
"self",
".",
"streaming_data",
"=",
"[",
"IOTileReading",
".",
"FromDict",
"(",
"x",
")",
"for",
"x",
"in",
"streaming_data",
"]"
] |
Accept data about one neoepitope from merge_mhc_peptide_calls and print it to outfile . This is a generic module to reduce code redundancy .
|
def print_mhc_peptide ( neoepitope_info , peptides , pepmap , outfile , netmhc = False ) : if netmhc : peptide_names = [ neoepitope_info . peptide_name ] else : peptide_names = [ x for x , y in peptides . items ( ) if neoepitope_info . pept in y ] # Convert named tuple to dict so it can be modified neoepitope_info = neoepitope_info . _asdict ( ) # Handle fusion peptides (They are characterized by having all N's as the normal partner) if neoepitope_info [ 'normal_pept' ] == 'N' * len ( neoepitope_info [ 'pept' ] ) : neoepitope_info [ 'normal_pept' ] = neoepitope_info [ 'normal_pred' ] = 'NA' # For each peptide, append the ensembl gene for peptide_name in peptide_names : print ( '{ni[allele]}\t' '{ni[pept]}\t' '{ni[normal_pept]}\t' '{pname}\t' '{ni[core]}\t' '0\t' '{ni[tumor_pred]}\t' '{ni[normal_pred]}\t' '{pmap}' . format ( ni = neoepitope_info , pname = peptide_name , pmap = pepmap [ peptide_name ] ) , file = outfile ) return None
| 2,954
|
https://github.com/BD2KGenomics/protect/blob/06310682c50dcf8917b912c8e551299ff7ee41ce/src/protect/binding_prediction/common.py#L581-L615
|
[
"def",
"model_stats",
"(",
"self",
")",
":",
"stats",
"=",
"self",
".",
"model",
".",
"default_stats",
"return",
"blob_data_to_dict",
"(",
"stats",
",",
"self",
".",
"_sampler",
".",
"blobs",
")"
] |
Check the ownership of a domain by going thru a serie of strategies . If at least one strategy succeed the domain is considered verified and this methods returns true .
|
def check ( domain , prefix , code , strategies = '*' ) : if strategies == '*' or 'dns_txt' in strategies : if check_dns_txt ( domain , prefix , code ) : return True if strategies == '*' or 'dns_cname' in strategies : if check_dns_cname ( domain , prefix , code ) : return True if strategies == '*' or 'meta_tag' in strategies : if check_meta_tag ( domain , prefix , code ) : return True if strategies == '*' or 'html_file' in strategies : if check_html_file ( domain , prefix , code ) : return True return False
| 2,955
|
https://github.com/rs/domcheck/blob/43e10c345320564a1236778e8577e2b8ef825925/domcheck/__init__.py#L6-L34
|
[
"def",
"strip_comments",
"(",
"text",
")",
":",
"regex",
"=",
"r'\\s*(#|\\/{2}).*$'",
"regex_inline",
"=",
"r'(:?(?:\\s)*([A-Za-z\\d\\.{}]*)|((?<=\\\").*\\\"),?)(?:\\s)*(((#|(\\/{2})).*)|)$'",
"# noqa",
"lines",
"=",
"text",
".",
"split",
"(",
"'\\n'",
")",
"for",
"index",
",",
"line",
"in",
"enumerate",
"(",
"lines",
")",
":",
"if",
"re",
".",
"search",
"(",
"regex",
",",
"line",
")",
":",
"if",
"re",
".",
"search",
"(",
"r'^'",
"+",
"regex",
",",
"line",
",",
"re",
".",
"IGNORECASE",
")",
":",
"lines",
"[",
"index",
"]",
"=",
"\"\"",
"elif",
"re",
".",
"search",
"(",
"regex_inline",
",",
"line",
")",
":",
"lines",
"[",
"index",
"]",
"=",
"re",
".",
"sub",
"(",
"regex_inline",
",",
"r'\\1'",
",",
"line",
")",
"return",
"'\\n'",
".",
"join",
"(",
"lines",
")"
] |
Register app in cache buster so that url_for adds a unique prefix to URLs generated for the static endpoint . Also make the app able to serve cache - busted static files .
|
def register_cache_buster ( self , app , config = None ) : if not ( config is None or isinstance ( config , dict ) ) : raise ValueError ( "`config` must be an instance of dict or None" ) bust_map = { } # map from an unbusted filename to a busted one unbust_map = { } # map from a busted filename to an unbusted one # http://flask.pocoo.org/docs/0.12/api/#flask.Flask.static_folder app . logger . debug ( 'Starting computing hashes for static assets' ) # compute (un)bust tables. for dirpath , dirnames , filenames in os . walk ( app . static_folder ) : for filename in filenames : # compute version component rooted_filename = os . path . join ( dirpath , filename ) if not self . __is_file_to_be_busted ( rooted_filename ) : continue app . logger . debug ( f'Computing hashes for {rooted_filename}' ) with open ( rooted_filename , 'rb' ) as f : version = hashlib . md5 ( f . read ( ) ) . hexdigest ( ) [ : self . hash_size ] # add version unbusted = os . path . relpath ( rooted_filename , app . static_folder ) # busted = os.path.join(version, unbusted) busted = f"{unbusted}?q={version}" # save computation to map bust_map [ unbusted ] = busted unbust_map [ busted ] = unbusted app . logger . debug ( 'Finished Starting computing hashes for static assets' ) def bust_filename ( file ) : return bust_map . get ( file , file ) def unbust_filename ( file ) : return unbust_map . get ( file , file ) @ app . url_defaults def reverse_to_cache_busted_url ( endpoint , values ) : """
Make `url_for` produce busted filenames when using the 'static'
endpoint.
""" if endpoint == 'static' : values [ 'filename' ] = bust_filename ( values [ 'filename' ] ) def debusting_static_view ( * args , * * kwargs ) : """
Serve a request for a static file having a busted name.
""" kwargs [ 'filename' ] = unbust_filename ( kwargs . get ( 'filename' ) ) return original_static_view ( * args , * * kwargs ) # Replace the default static file view with our debusting view. original_static_view = app . view_functions [ 'static' ] app . view_functions [ 'static' ] = debusting_static_view
| 2,956
|
https://github.com/daxlab/Flask-Cache-Buster/blob/4c10bed9ab46020904df565a9c0014a7f2e4f6b3/flask_cache_buster/__init__.py#L29-L93
|
[
"def",
"_copy_future_state",
"(",
"source",
",",
"dest",
")",
":",
"assert",
"source",
".",
"done",
"(",
")",
"if",
"dest",
".",
"cancelled",
"(",
")",
":",
"return",
"assert",
"not",
"dest",
".",
"done",
"(",
")",
"if",
"source",
".",
"cancelled",
"(",
")",
":",
"dest",
".",
"cancel",
"(",
")",
"else",
":",
"exception",
"=",
"source",
".",
"exception",
"(",
")",
"if",
"exception",
"is",
"not",
"None",
":",
"dest",
".",
"set_exception",
"(",
"exception",
")",
"else",
":",
"result",
"=",
"source",
".",
"result",
"(",
")",
"dest",
".",
"set_result",
"(",
"result",
")"
] |
Get environment variable or provide default .
|
def env_or_default ( var , default = None ) : if var in os . environ : return os . environ [ var ] return default
| 2,957
|
https://github.com/theherk/figgypy/blob/324d1b281a8df20a26b92f42bf7fda0cca892116/figgypy/utils.py#L11-L20
|
[
"def",
"numpy_to_weld_type",
"(",
"np_dtype",
")",
":",
"if",
"not",
"isinstance",
"(",
"np_dtype",
",",
"(",
"str",
",",
"bytes",
",",
"np",
".",
"dtype",
",",
"type",
")",
")",
":",
"raise",
"TypeError",
"(",
"'Can only convert np.dtype or str'",
")",
"if",
"isinstance",
"(",
"np_dtype",
",",
"(",
"str",
",",
"bytes",
",",
"type",
")",
")",
":",
"np_dtype",
"=",
"np",
".",
"dtype",
"(",
"np_dtype",
")",
"return",
"_numpy_to_weld_type_mapping",
"[",
"np_dtype",
".",
"char",
"]"
] |
Encrypt and value with KMS key .
|
def kms_encrypt ( value , key , aws_config = None ) : aws_config = aws_config or { } aws = boto3 . session . Session ( * * aws_config ) client = aws . client ( 'kms' ) enc_res = client . encrypt ( KeyId = key , Plaintext = value ) return n ( b64encode ( enc_res [ 'CiphertextBlob' ] ) )
| 2,958
|
https://github.com/theherk/figgypy/blob/324d1b281a8df20a26b92f42bf7fda0cca892116/figgypy/utils.py#L23-L44
|
[
"def",
"writearff",
"(",
"data",
",",
"filename",
",",
"relation_name",
"=",
"None",
",",
"index",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"filename",
",",
"str",
")",
":",
"fp",
"=",
"open",
"(",
"filename",
",",
"'w'",
")",
"if",
"relation_name",
"is",
"None",
":",
"relation_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
"else",
":",
"fp",
"=",
"filename",
"if",
"relation_name",
"is",
"None",
":",
"relation_name",
"=",
"\"pandas\"",
"try",
":",
"data",
"=",
"_write_header",
"(",
"data",
",",
"fp",
",",
"relation_name",
",",
"index",
")",
"fp",
".",
"write",
"(",
"\"\\n\"",
")",
"_write_data",
"(",
"data",
",",
"fp",
")",
"finally",
":",
"fp",
".",
"close",
"(",
")"
] |
Get from config object by exposing Config . get_value method .
|
def get_value ( * args , * * kwargs ) : global _config if _config is None : raise ValueError ( 'configuration not set; must run figgypy.set_config first' ) return _config . get_value ( * args , * * kwargs )
| 2,959
|
https://github.com/theherk/figgypy/blob/324d1b281a8df20a26b92f42bf7fda0cca892116/figgypy/__init__.py#L27-L35
|
[
"def",
"_get_generic_two_antidep_episodes_result",
"(",
"rowdata",
":",
"Tuple",
"[",
"Any",
",",
"...",
"]",
"=",
"None",
")",
"->",
"DataFrame",
":",
"# Valid data types... see:",
"# - pandas.core.dtypes.common.pandas_dtype",
"# - https://pandas.pydata.org/pandas-docs/stable/timeseries.html",
"# - https://docs.scipy.org/doc/numpy-1.13.0/reference/arrays.datetime.html",
"data",
"=",
"[",
"rowdata",
"]",
"if",
"rowdata",
"else",
"[",
"]",
"return",
"DataFrame",
"(",
"array",
"(",
"data",
",",
"# data",
"dtype",
"=",
"[",
"# column definitions:",
"(",
"RCN_PATIENT_ID",
",",
"DTYPE_STRING",
")",
",",
"(",
"RCN_DRUG_A_NAME",
",",
"DTYPE_STRING",
")",
",",
"(",
"RCN_DRUG_A_FIRST_MENTION",
",",
"DTYPE_DATE",
")",
",",
"(",
"RCN_DRUG_A_SECOND_MENTION",
",",
"DTYPE_DATE",
")",
",",
"(",
"RCN_DRUG_B_NAME",
",",
"DTYPE_STRING",
")",
",",
"(",
"RCN_DRUG_B_FIRST_MENTION",
",",
"DTYPE_DATE",
")",
",",
"(",
"RCN_DRUG_B_SECOND_MENTION",
",",
"DTYPE_DATE",
")",
",",
"(",
"RCN_EXPECT_RESPONSE_BY_DATE",
",",
"DTYPE_DATE",
")",
",",
"(",
"RCN_END_OF_SYMPTOM_PERIOD",
",",
"DTYPE_DATE",
")",
",",
"]",
")",
")"
] |
Set value in the global Config object .
|
def set_value ( * args , * * kwargs ) : global _config if _config is None : raise ValueError ( 'configuration not set; must run figgypy.set_config first' ) return _config . set_value ( * args , * * kwargs )
| 2,960
|
https://github.com/theherk/figgypy/blob/324d1b281a8df20a26b92f42bf7fda0cca892116/figgypy/__init__.py#L73-L78
|
[
"def",
"revoke_session",
"(",
"self",
",",
"sid",
"=",
"''",
",",
"token",
"=",
"''",
")",
":",
"if",
"not",
"sid",
":",
"if",
"token",
":",
"sid",
"=",
"self",
".",
"handler",
".",
"sid",
"(",
"token",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Need one of \"sid\" or \"token\"'",
")",
"for",
"typ",
"in",
"[",
"'access_token'",
",",
"'refresh_token'",
",",
"'code'",
"]",
":",
"try",
":",
"self",
".",
"revoke_token",
"(",
"self",
"[",
"sid",
"]",
"[",
"typ",
"]",
",",
"typ",
")",
"except",
"KeyError",
":",
"# If no such token has been issued",
"pass",
"self",
".",
"update",
"(",
"sid",
",",
"revoked",
"=",
"True",
")"
] |
Unescape libconfig string literals
|
def decode_escapes ( s ) : def decode_match ( match ) : return codecs . decode ( match . group ( 0 ) , 'unicode-escape' ) return ESCAPE_SEQUENCE_RE . sub ( decode_match , s )
| 2,961
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L50-L55
|
[
"def",
"_adapt_WSDateTime",
"(",
"dt",
")",
":",
"try",
":",
"ts",
"=",
"int",
"(",
"(",
"dt",
".",
"replace",
"(",
"tzinfo",
"=",
"pytz",
".",
"utc",
")",
"-",
"datetime",
"(",
"1970",
",",
"1",
",",
"1",
",",
"tzinfo",
"=",
"pytz",
".",
"utc",
")",
")",
".",
"total_seconds",
"(",
")",
")",
"except",
"(",
"OverflowError",
",",
"OSError",
")",
":",
"if",
"dt",
"<",
"datetime",
".",
"now",
"(",
")",
":",
"ts",
"=",
"0",
"else",
":",
"ts",
"=",
"2",
"**",
"63",
"-",
"1",
"return",
"ts"
] |
Load the contents of string to a Python object
|
def loads ( string , filename = None , includedir = '' ) : try : f = io . StringIO ( string ) except TypeError : raise TypeError ( "libconf.loads() input string must by unicode" ) return load ( f , filename = filename , includedir = includedir )
| 2,962
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L501-L521
|
[
"def",
"interp_color",
"(",
"a",
",",
"b",
",",
"f",
")",
":",
"a_",
"=",
"(",
"a",
".",
"redF",
"(",
")",
",",
"a",
".",
"greenF",
"(",
")",
",",
"a",
".",
"blueF",
"(",
")",
")",
"b_",
"=",
"(",
"b",
".",
"redF",
"(",
")",
",",
"b",
".",
"greenF",
"(",
")",
",",
"b",
".",
"blueF",
"(",
")",
")",
"a_",
"=",
"[",
"x",
"*",
"(",
"1",
"-",
"f",
")",
"for",
"x",
"in",
"a_",
"]",
"b_",
"=",
"[",
"x",
"*",
"f",
"for",
"x",
"in",
"b_",
"]",
"c",
"=",
"[",
"x",
"+",
"y",
"for",
"x",
",",
"y",
"in",
"zip",
"(",
"a_",
",",
"b_",
")",
"]",
"return",
"QtGui",
".",
"QColor",
".",
"fromRgbF",
"(",
"*",
"c",
")"
] |
Stringize s adding double quotes and escaping as necessary
|
def dump_string ( s ) : s = ( s . replace ( '\\' , '\\\\' ) . replace ( '"' , '\\"' ) . replace ( '\f' , r'\f' ) . replace ( '\n' , r'\n' ) . replace ( '\r' , r'\r' ) . replace ( '\t' , r'\t' ) ) s = UNPRINTABLE_CHARACTER_RE . sub ( lambda m : r'\x{:02x}' . format ( ord ( m . group ( 0 ) ) ) , s ) return '"' + s + '"'
| 2,963
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L555-L572
|
[
"async",
"def",
"get_default_storage_layout",
"(",
"cls",
")",
"->",
"StorageLayout",
":",
"data",
"=",
"await",
"cls",
".",
"get_config",
"(",
"\"default_storage_layout\"",
")",
"return",
"cls",
".",
"StorageLayout",
".",
"lookup",
"(",
"data",
")"
] |
Get the libconfig datatype of a value
|
def get_dump_type ( value ) : if isinstance ( value , dict ) : return 'd' if isinstance ( value , tuple ) : return 'l' if isinstance ( value , list ) : return 'a' # Test bool before int since isinstance(True, int) == True. if isinstance ( value , bool ) : return 'b' if isint ( value ) : if is_long_int ( value ) : return 'i64' else : return 'i' if isinstance ( value , float ) : return 'f' if isstr ( value ) : return 's' return None
| 2,964
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L575-L606
|
[
"def",
"_index_audio_cmu",
"(",
"self",
",",
"basename",
"=",
"None",
",",
"replace_already_indexed",
"=",
"False",
")",
":",
"self",
".",
"_prepare_audio",
"(",
"basename",
"=",
"basename",
",",
"replace_already_indexed",
"=",
"replace_already_indexed",
")",
"for",
"staging_audio_basename",
"in",
"self",
".",
"_list_audio_files",
"(",
"sub_dir",
"=",
"\"staging\"",
")",
":",
"original_audio_name",
"=",
"''",
".",
"join",
"(",
"staging_audio_basename",
".",
"split",
"(",
"'.'",
")",
"[",
":",
"-",
"1",
"]",
")",
"[",
":",
"-",
"3",
"]",
"pocketsphinx_command",
"=",
"''",
".",
"join",
"(",
"[",
"\"pocketsphinx_continuous\"",
",",
"\"-infile\"",
",",
"str",
"(",
"\"{}/staging/{}\"",
".",
"format",
"(",
"self",
".",
"src_dir",
",",
"staging_audio_basename",
")",
")",
",",
"\"-time\"",
",",
"\"yes\"",
",",
"\"-logfn\"",
",",
"\"/dev/null\"",
"]",
")",
"try",
":",
"if",
"self",
".",
"get_verbosity",
"(",
")",
":",
"print",
"(",
"\"Now indexing {}\"",
".",
"format",
"(",
"staging_audio_basename",
")",
")",
"output",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"\"pocketsphinx_continuous\"",
",",
"\"-infile\"",
",",
"str",
"(",
"\"{}/staging/{}\"",
".",
"format",
"(",
"self",
".",
"src_dir",
",",
"staging_audio_basename",
")",
")",
",",
"\"-time\"",
",",
"\"yes\"",
",",
"\"-logfn\"",
",",
"\"/dev/null\"",
"]",
",",
"universal_newlines",
"=",
"True",
")",
".",
"split",
"(",
"'\\n'",
")",
"str_timestamps_with_sil_conf",
"=",
"list",
"(",
"map",
"(",
"lambda",
"x",
":",
"x",
".",
"split",
"(",
"\" \"",
")",
",",
"filter",
"(",
"None",
",",
"output",
"[",
"1",
":",
"]",
")",
")",
")",
"# Timestamps are putted in a list of a single element. To match",
"# Watson's output.",
"self",
".",
"__timestamps_unregulated",
"[",
"original_audio_name",
"+",
"\".wav\"",
"]",
"=",
"[",
"(",
"self",
".",
"_timestamp_extractor_cmu",
"(",
"staging_audio_basename",
",",
"str_timestamps_with_sil_conf",
")",
")",
"]",
"if",
"self",
".",
"get_verbosity",
"(",
")",
":",
"print",
"(",
"\"Done indexing {}\"",
".",
"format",
"(",
"staging_audio_basename",
")",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"self",
".",
"get_verbosity",
"(",
")",
":",
"print",
"(",
"e",
",",
"\"The command was: {}\"",
".",
"format",
"(",
"pocketsphinx_command",
")",
")",
"self",
".",
"__errors",
"[",
"(",
"time",
"(",
")",
",",
"staging_audio_basename",
")",
"]",
"=",
"e",
"self",
".",
"_timestamp_regulator",
"(",
")",
"if",
"self",
".",
"get_verbosity",
"(",
")",
":",
"print",
"(",
"\"Finished indexing procedure\"",
")"
] |
Return array value type raise ConfigSerializeError for invalid arrays
|
def get_array_value_dtype ( lst ) : array_value_type = None for value in lst : dtype = get_dump_type ( value ) if dtype not in { 'b' , 'i' , 'i64' , 'f' , 's' } : raise ConfigSerializeError ( "Invalid datatype in array (may only contain scalars):" "%r of type %s" % ( value , type ( value ) ) ) if array_value_type is None : array_value_type = dtype continue if array_value_type == dtype : continue if array_value_type == 'i' and dtype == 'i64' : array_value_type = 'i64' continue if array_value_type == 'i64' and dtype == 'i' : continue raise ConfigSerializeError ( "Mixed types in array (all elements must have same type):" "%r of type %s" % ( value , type ( value ) ) ) return array_value_type
| 2,965
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L609-L646
|
[
"def",
"benchmark_forward",
"(",
"self",
")",
":",
"self",
".",
"_setup",
"(",
")",
"def",
"f",
"(",
")",
":",
"self",
".",
"_forward",
"(",
")",
"self",
".",
"mod_ext",
".",
"synchronize",
"(",
"*",
"*",
"self",
".",
"ext_kwargs",
")",
"f",
"(",
")",
"# Ignore first",
"self",
".",
"forward_stat",
"=",
"self",
".",
"_calc_benchmark_stat",
"(",
"f",
")"
] |
Save a value of any libconfig type
|
def dump_value ( key , value , f , indent = 0 ) : spaces = ' ' * indent if key is None : key_prefix = '' key_prefix_nl = '' else : key_prefix = key + ' = ' key_prefix_nl = key + ' =\n' + spaces dtype = get_dump_type ( value ) if dtype == 'd' : f . write ( u'{}{}{{\n' . format ( spaces , key_prefix_nl ) ) dump_dict ( value , f , indent + 4 ) f . write ( u'{}}}' . format ( spaces ) ) elif dtype == 'l' : f . write ( u'{}{}(\n' . format ( spaces , key_prefix_nl ) ) dump_collection ( value , f , indent + 4 ) f . write ( u'\n{})' . format ( spaces ) ) elif dtype == 'a' : f . write ( u'{}{}[\n' . format ( spaces , key_prefix_nl ) ) value_dtype = get_array_value_dtype ( value ) # If int array contains one or more Int64, promote all values to i64. if value_dtype == 'i64' : value = [ LibconfInt64 ( v ) for v in value ] dump_collection ( value , f , indent + 4 ) f . write ( u'\n{}]' . format ( spaces ) ) elif dtype == 's' : f . write ( u'{}{}{}' . format ( spaces , key_prefix , dump_string ( value ) ) ) elif dtype == 'i' or dtype == 'i64' : f . write ( u'{}{}{}' . format ( spaces , key_prefix , dump_int ( value ) ) ) elif dtype == 'f' or dtype == 'b' : f . write ( u'{}{}{}' . format ( spaces , key_prefix , value ) ) else : raise ConfigSerializeError ( "Can not serialize object %r of type %s" % ( value , type ( value ) ) )
| 2,966
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L649-L692
|
[
"def",
"_timestamp_regulator",
"(",
"self",
")",
":",
"unified_timestamps",
"=",
"_PrettyDefaultDict",
"(",
"list",
")",
"staged_files",
"=",
"self",
".",
"_list_audio_files",
"(",
"sub_dir",
"=",
"\"staging\"",
")",
"for",
"timestamp_basename",
"in",
"self",
".",
"__timestamps_unregulated",
":",
"if",
"len",
"(",
"self",
".",
"__timestamps_unregulated",
"[",
"timestamp_basename",
"]",
")",
">",
"1",
":",
"# File has been splitted",
"timestamp_name",
"=",
"''",
".",
"join",
"(",
"timestamp_basename",
".",
"split",
"(",
"'.'",
")",
"[",
":",
"-",
"1",
"]",
")",
"staged_splitted_files_of_timestamp",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"staged_file",
":",
"(",
"timestamp_name",
"==",
"staged_file",
"[",
":",
"-",
"3",
"]",
"and",
"all",
"(",
"[",
"(",
"x",
"in",
"set",
"(",
"map",
"(",
"str",
",",
"range",
"(",
"10",
")",
")",
")",
")",
"for",
"x",
"in",
"staged_file",
"[",
"-",
"3",
":",
"]",
"]",
")",
")",
",",
"staged_files",
")",
")",
"if",
"len",
"(",
"staged_splitted_files_of_timestamp",
")",
"==",
"0",
":",
"self",
".",
"__errors",
"[",
"(",
"time",
"(",
")",
",",
"timestamp_basename",
")",
"]",
"=",
"{",
"\"reason\"",
":",
"\"Missing staged file\"",
",",
"\"current_staged_files\"",
":",
"staged_files",
"}",
"continue",
"staged_splitted_files_of_timestamp",
".",
"sort",
"(",
")",
"unified_timestamp",
"=",
"list",
"(",
")",
"for",
"staging_digits",
",",
"splitted_file",
"in",
"enumerate",
"(",
"self",
".",
"__timestamps_unregulated",
"[",
"timestamp_basename",
"]",
")",
":",
"prev_splits_sec",
"=",
"0",
"if",
"int",
"(",
"staging_digits",
")",
"!=",
"0",
":",
"prev_splits_sec",
"=",
"self",
".",
"_get_audio_duration_seconds",
"(",
"\"{}/staging/{}{:03d}\"",
".",
"format",
"(",
"self",
".",
"src_dir",
",",
"timestamp_name",
",",
"staging_digits",
"-",
"1",
")",
")",
"for",
"word_block",
"in",
"splitted_file",
":",
"unified_timestamp",
".",
"append",
"(",
"_WordBlock",
"(",
"word",
"=",
"word_block",
".",
"word",
",",
"start",
"=",
"round",
"(",
"word_block",
".",
"start",
"+",
"prev_splits_sec",
",",
"2",
")",
",",
"end",
"=",
"round",
"(",
"word_block",
".",
"end",
"+",
"prev_splits_sec",
",",
"2",
")",
")",
")",
"unified_timestamps",
"[",
"str",
"(",
"timestamp_basename",
")",
"]",
"+=",
"unified_timestamp",
"else",
":",
"unified_timestamps",
"[",
"timestamp_basename",
"]",
"+=",
"self",
".",
"__timestamps_unregulated",
"[",
"timestamp_basename",
"]",
"[",
"0",
"]",
"self",
".",
"__timestamps",
".",
"update",
"(",
"unified_timestamps",
")",
"self",
".",
"__timestamps_unregulated",
"=",
"_PrettyDefaultDict",
"(",
"list",
")"
] |
Save a collection of attributes
|
def dump_collection ( cfg , f , indent = 0 ) : for i , value in enumerate ( cfg ) : dump_value ( None , value , f , indent ) if i < len ( cfg ) - 1 : f . write ( u',\n' )
| 2,967
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L695-L701
|
[
"def",
"RepackAllTemplates",
"(",
"self",
",",
"upload",
"=",
"False",
",",
"token",
"=",
"None",
")",
":",
"for",
"template",
"in",
"os",
".",
"listdir",
"(",
"config",
".",
"CONFIG",
"[",
"\"ClientBuilder.template_dir\"",
"]",
")",
":",
"template_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config",
".",
"CONFIG",
"[",
"\"ClientBuilder.template_dir\"",
"]",
",",
"template",
")",
"self",
".",
"RepackTemplate",
"(",
"template_path",
",",
"os",
".",
"path",
".",
"join",
"(",
"config",
".",
"CONFIG",
"[",
"\"ClientBuilder.executables_dir\"",
"]",
",",
"\"installers\"",
")",
",",
"upload",
"=",
"upload",
",",
"token",
"=",
"token",
")",
"# If it's windows also repack a debug version.",
"if",
"template_path",
".",
"endswith",
"(",
"\".exe.zip\"",
")",
":",
"print",
"(",
"\"Repacking as debug installer: %s.\"",
"%",
"template_path",
")",
"self",
".",
"RepackTemplate",
"(",
"template_path",
",",
"os",
".",
"path",
".",
"join",
"(",
"config",
".",
"CONFIG",
"[",
"\"ClientBuilder.executables_dir\"",
"]",
",",
"\"installers\"",
")",
",",
"upload",
"=",
"upload",
",",
"token",
"=",
"token",
",",
"context",
"=",
"[",
"\"DebugClientBuild Context\"",
"]",
")"
] |
Save a dictionary of attributes
|
def dump_dict ( cfg , f , indent = 0 ) : for key in cfg : if not isstr ( key ) : raise ConfigSerializeError ( "Dict keys must be strings: %r" % ( key , ) ) dump_value ( key , cfg [ key ] , f , indent ) f . write ( u';\n' )
| 2,968
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L704-L712
|
[
"def",
"_parse_parallel_sentences",
"(",
"f1",
",",
"f2",
")",
":",
"def",
"_parse_text",
"(",
"path",
")",
":",
"\"\"\"Returns the sentences from a single text file, which may be gzipped.\"\"\"",
"split_path",
"=",
"path",
".",
"split",
"(",
"\".\"",
")",
"if",
"split_path",
"[",
"-",
"1",
"]",
"==",
"\"gz\"",
":",
"lang",
"=",
"split_path",
"[",
"-",
"2",
"]",
"with",
"tf",
".",
"io",
".",
"gfile",
".",
"GFile",
"(",
"path",
")",
"as",
"f",
",",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"f",
")",
"as",
"g",
":",
"return",
"g",
".",
"read",
"(",
")",
".",
"split",
"(",
"\"\\n\"",
")",
",",
"lang",
"if",
"split_path",
"[",
"-",
"1",
"]",
"==",
"\"txt\"",
":",
"# CWMT",
"lang",
"=",
"split_path",
"[",
"-",
"2",
"]",
".",
"split",
"(",
"\"_\"",
")",
"[",
"-",
"1",
"]",
"lang",
"=",
"\"zh\"",
"if",
"lang",
"in",
"(",
"\"ch\"",
",",
"\"cn\"",
")",
"else",
"lang",
"else",
":",
"lang",
"=",
"split_path",
"[",
"-",
"1",
"]",
"with",
"tf",
".",
"io",
".",
"gfile",
".",
"GFile",
"(",
"path",
")",
"as",
"f",
":",
"return",
"f",
".",
"read",
"(",
")",
".",
"split",
"(",
"\"\\n\"",
")",
",",
"lang",
"def",
"_parse_sgm",
"(",
"path",
")",
":",
"\"\"\"Returns sentences from a single SGML file.\"\"\"",
"lang",
"=",
"path",
".",
"split",
"(",
"\".\"",
")",
"[",
"-",
"2",
"]",
"sentences",
"=",
"[",
"]",
"# Note: We can't use the XML parser since some of the files are badly",
"# formatted.",
"seg_re",
"=",
"re",
".",
"compile",
"(",
"r\"<seg id=\\\"\\d+\\\">(.*)</seg>\"",
")",
"with",
"tf",
".",
"io",
".",
"gfile",
".",
"GFile",
"(",
"path",
")",
"as",
"f",
":",
"for",
"line",
"in",
"f",
":",
"seg_match",
"=",
"re",
".",
"match",
"(",
"seg_re",
",",
"line",
")",
"if",
"seg_match",
":",
"assert",
"len",
"(",
"seg_match",
".",
"groups",
"(",
")",
")",
"==",
"1",
"sentences",
".",
"append",
"(",
"seg_match",
".",
"groups",
"(",
")",
"[",
"0",
"]",
")",
"return",
"sentences",
",",
"lang",
"parse_file",
"=",
"_parse_sgm",
"if",
"f1",
".",
"endswith",
"(",
"\".sgm\"",
")",
"else",
"_parse_text",
"# Some datasets (e.g., CWMT) contain multiple parallel files specified with",
"# a wildcard. We sort both sets to align them and parse them one by one.",
"f1_files",
"=",
"tf",
".",
"io",
".",
"gfile",
".",
"glob",
"(",
"f1",
")",
"f2_files",
"=",
"tf",
".",
"io",
".",
"gfile",
".",
"glob",
"(",
"f2",
")",
"assert",
"f1_files",
"and",
"f2_files",
",",
"\"No matching files found: %s, %s.\"",
"%",
"(",
"f1",
",",
"f2",
")",
"assert",
"len",
"(",
"f1_files",
")",
"==",
"len",
"(",
"f2_files",
")",
",",
"(",
"\"Number of files do not match: %d vs %d for %s vs %s.\"",
"%",
"(",
"len",
"(",
"f1_files",
")",
",",
"len",
"(",
"f2_files",
")",
",",
"f1",
",",
"f2",
")",
")",
"for",
"f1_i",
",",
"f2_i",
"in",
"zip",
"(",
"sorted",
"(",
"f1_files",
")",
",",
"sorted",
"(",
"f2_files",
")",
")",
":",
"l1_sentences",
",",
"l1",
"=",
"parse_file",
"(",
"f1_i",
")",
"l2_sentences",
",",
"l2",
"=",
"parse_file",
"(",
"f2_i",
")",
"assert",
"len",
"(",
"l1_sentences",
")",
"==",
"len",
"(",
"l2_sentences",
")",
",",
"(",
"\"Sizes do not match: %d vs %d for %s vs %s.\"",
"%",
"(",
"len",
"(",
"l1_sentences",
")",
",",
"len",
"(",
"l2_sentences",
")",
",",
"f1_i",
",",
"f2_i",
")",
")",
"for",
"s1",
",",
"s2",
"in",
"zip",
"(",
"l1_sentences",
",",
"l2_sentences",
")",
":",
"yield",
"{",
"l1",
":",
"s1",
",",
"l2",
":",
"s2",
"}"
] |
Serialize cfg into a libconfig - formatted str
|
def dumps ( cfg ) : str_file = io . StringIO ( ) dump ( cfg , str_file ) return str_file . getvalue ( )
| 2,969
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L715-L726
|
[
"def",
"process_response",
"(",
"self",
",",
"request",
",",
"response",
")",
":",
"try",
":",
"modified",
"=",
"request",
".",
"session",
".",
"modified",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"if",
"modified",
"or",
"settings",
".",
"SESSION_SAVE_EVERY_REQUEST",
":",
"if",
"request",
".",
"session",
".",
"get_expire_at_browser_close",
"(",
")",
":",
"max_age",
"=",
"None",
"expires",
"=",
"None",
"else",
":",
"max_age",
"=",
"request",
".",
"session",
".",
"get_expiry_age",
"(",
")",
"expires_time",
"=",
"time",
".",
"time",
"(",
")",
"+",
"max_age",
"expires",
"=",
"cookie_date",
"(",
"expires_time",
")",
"# Save the session data and refresh the client cookie.",
"request",
".",
"session",
".",
"save",
"(",
")",
"response",
".",
"set_cookie",
"(",
"settings",
".",
"SESSION_COOKIE_NAME",
",",
"request",
".",
"session",
".",
"session_key",
",",
"max_age",
"=",
"max_age",
",",
"expires",
"=",
"expires",
",",
"domain",
"=",
"settings",
".",
"SESSION_COOKIE_DOMAIN",
",",
"path",
"=",
"settings",
".",
"SESSION_COOKIE_PATH",
",",
"secure",
"=",
"settings",
".",
"SESSION_COOKIE_SECURE",
"or",
"None",
")",
"return",
"response"
] |
Serialize cfg as a libconfig - formatted stream into f
|
def dump ( cfg , f ) : if not isinstance ( cfg , dict ) : raise ConfigSerializeError ( 'dump() requires a dict as input, not %r of type %r' % ( cfg , type ( cfg ) ) ) dump_dict ( cfg , f , 0 )
| 2,970
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L729-L743
|
[
"def",
"unpack_unsubscribe_rsp",
"(",
"cls",
",",
"rsp_pb",
")",
":",
"if",
"rsp_pb",
".",
"retType",
"!=",
"RET_OK",
":",
"return",
"RET_ERROR",
",",
"rsp_pb",
".",
"retMsg",
",",
"None",
"return",
"RET_OK",
",",
"\"\"",
",",
"None"
] |
Yield tokens from the input string or throw ConfigParseError
|
def tokenize ( self , string ) : pos = 0 while pos < len ( string ) : m = SKIP_RE . match ( string , pos = pos ) if m : skip_lines = m . group ( 0 ) . split ( '\n' ) if len ( skip_lines ) > 1 : self . row += len ( skip_lines ) - 1 self . column = 1 + len ( skip_lines [ - 1 ] ) else : self . column += len ( skip_lines [ 0 ] ) pos = m . end ( ) continue for cls , type , regex in self . token_map : m = regex . match ( string , pos = pos ) if m : yield cls ( type , m . group ( 0 ) , self . filename , self . row , self . column ) self . column += len ( m . group ( 0 ) ) pos = m . end ( ) break else : raise ConfigParseError ( "Couldn't load config in %r row %d, column %d: %r" % ( self . filename , self . row , self . column , string [ pos : pos + 20 ] ) )
| 2,971
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L178-L206
|
[
"def",
"get_gae_versions",
"(",
")",
":",
"r",
"=",
"requests",
".",
"get",
"(",
"SDK_RELEASES_URL",
")",
"r",
".",
"raise_for_status",
"(",
")",
"releases",
"=",
"r",
".",
"json",
"(",
")",
".",
"get",
"(",
"'items'",
",",
"{",
"}",
")",
"# We only care about the Python releases, which all are in the format",
"# \"featured/google_appengine_{version}.zip\". We'll extract the version",
"# number so we can sort the list by version, and finally get the download",
"# URL.",
"versions_and_urls",
"=",
"[",
"]",
"for",
"release",
"in",
"releases",
":",
"match",
"=",
"PYTHON_RELEASE_RE",
".",
"match",
"(",
"release",
"[",
"'name'",
"]",
")",
"if",
"not",
"match",
":",
"continue",
"versions_and_urls",
".",
"append",
"(",
"(",
"[",
"int",
"(",
"x",
")",
"for",
"x",
"in",
"match",
".",
"groups",
"(",
")",
"]",
",",
"release",
"[",
"'mediaLink'",
"]",
")",
")",
"return",
"sorted",
"(",
"versions_and_urls",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
")"
] |
Create a token stream by reading an input file
|
def from_file ( cls , f , filename = None , includedir = '' , seenfiles = None ) : if filename is None : filename = getattr ( f , 'name' , '<unknown>' ) if seenfiles is None : seenfiles = set ( ) if filename in seenfiles : raise ConfigParseError ( "Circular include: %r" % ( filename , ) ) seenfiles = seenfiles | { filename } # Copy seenfiles, don't alter it. tokenizer = Tokenizer ( filename = filename ) lines = [ ] tokens = [ ] for line in f : m = re . match ( r'@include "(.*)"$' , line . strip ( ) ) if m : tokens . extend ( tokenizer . tokenize ( '' . join ( lines ) ) ) lines = [ re . sub ( r'\S' , ' ' , line ) ] includefilename = decode_escapes ( m . group ( 1 ) ) includefilename = os . path . join ( includedir , includefilename ) try : includefile = open ( includefilename , "r" ) except IOError : raise ConfigParseError ( "Could not open include file %r" % ( includefilename , ) ) with includefile : includestream = cls . from_file ( includefile , filename = includefilename , includedir = includedir , seenfiles = seenfiles ) tokens . extend ( includestream . tokens ) else : lines . append ( line ) tokens . extend ( tokenizer . tokenize ( '' . join ( lines ) ) ) return cls ( tokens )
| 2,972
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L224-L273
|
[
"def",
"distanceMetric",
"(",
"thing_A",
",",
"thing_B",
")",
":",
"# Get the types of the two inputs",
"typeA",
"=",
"type",
"(",
"thing_A",
")",
"typeB",
"=",
"type",
"(",
"thing_B",
")",
"if",
"typeA",
"is",
"list",
"and",
"typeB",
"is",
"list",
":",
"lenA",
"=",
"len",
"(",
"thing_A",
")",
"# If both inputs are lists, then the distance between",
"lenB",
"=",
"len",
"(",
"thing_B",
")",
"# them is the maximum distance between corresponding",
"if",
"lenA",
"==",
"lenB",
":",
"# elements in the lists. If they differ in length,",
"distance_temp",
"=",
"[",
"]",
"# the distance is the difference in lengths.",
"for",
"n",
"in",
"range",
"(",
"lenA",
")",
":",
"distance_temp",
".",
"append",
"(",
"distanceMetric",
"(",
"thing_A",
"[",
"n",
"]",
",",
"thing_B",
"[",
"n",
"]",
")",
")",
"distance",
"=",
"max",
"(",
"distance_temp",
")",
"else",
":",
"distance",
"=",
"float",
"(",
"abs",
"(",
"lenA",
"-",
"lenB",
")",
")",
"# If both inputs are numbers, return their difference",
"elif",
"(",
"typeA",
"is",
"int",
"or",
"typeB",
"is",
"float",
")",
"and",
"(",
"typeB",
"is",
"int",
"or",
"typeB",
"is",
"float",
")",
":",
"distance",
"=",
"float",
"(",
"abs",
"(",
"thing_A",
"-",
"thing_B",
")",
")",
"# If both inputs are array-like, return the maximum absolute difference b/w",
"# corresponding elements (if same shape); return largest difference in dimensions",
"# if shapes do not align.",
"elif",
"hasattr",
"(",
"thing_A",
",",
"'shape'",
")",
"and",
"hasattr",
"(",
"thing_B",
",",
"'shape'",
")",
":",
"if",
"thing_A",
".",
"shape",
"==",
"thing_B",
".",
"shape",
":",
"distance",
"=",
"np",
".",
"max",
"(",
"abs",
"(",
"thing_A",
"-",
"thing_B",
")",
")",
"else",
":",
"distance",
"=",
"np",
".",
"max",
"(",
"abs",
"(",
"thing_A",
".",
"shape",
"-",
"thing_B",
".",
"shape",
")",
")",
"# If none of the above cases, but the objects are of the same class, call",
"# the distance method of one on the other",
"elif",
"thing_A",
".",
"__class__",
".",
"__name__",
"==",
"thing_B",
".",
"__class__",
".",
"__name__",
":",
"if",
"thing_A",
".",
"__class__",
".",
"__name__",
"==",
"'function'",
":",
"distance",
"=",
"0.0",
"else",
":",
"distance",
"=",
"thing_A",
".",
"distance",
"(",
"thing_B",
")",
"else",
":",
"# Failsafe: the inputs are very far apart",
"distance",
"=",
"1000.0",
"return",
"distance"
] |
Raise a ConfigParseError at the current input position
|
def error ( self , msg ) : if self . finished ( ) : raise ConfigParseError ( "Unexpected end of input; %s" % ( msg , ) ) else : t = self . peek ( ) raise ConfigParseError ( "Unexpected token %s; %s" % ( t , msg ) )
| 2,973
|
https://github.com/Grk0/python-libconf/blob/9c4cf5f56d56ebbc1fe0e1596807218b7d5d5da4/libconf.py#L321-L327
|
[
"def",
"clean_weight_files",
"(",
"cls",
")",
":",
"deleted",
"=",
"[",
"]",
"for",
"f",
"in",
"cls",
".",
"_files",
":",
"try",
":",
"os",
".",
"remove",
"(",
"f",
")",
"deleted",
".",
"append",
"(",
"f",
")",
"except",
"FileNotFoundError",
":",
"pass",
"print",
"(",
"'Deleted %d weight files'",
"%",
"len",
"(",
"deleted",
")",
")",
"cls",
".",
"_files",
"=",
"[",
"]"
] |
Load variables from environment variables .
|
def load_variables ( ) : if ( not os . environ . get ( "PYCONFLUENCE_TOKEN" ) or not os . environ . get ( "PYCONFLUENCE_USER" ) or not os . environ . get ( "PYCONFLUENCE_ORG" ) ) : print ( "One or more pyconfluence environment variables are not set. " "See README for directions on how to resolve this." ) sys . exit ( "Error" ) global token global user global base_url token = os . environ [ "PYCONFLUENCE_TOKEN" ] user = os . environ [ "PYCONFLUENCE_USER" ] base_url = ( "https://" + os . environ [ "PYCONFLUENCE_ORG" ] + ".atlassian" ".net/wiki/rest/api/content" )
| 2,974
|
https://github.com/FulcrumTechnologies/pyconfluence/blob/a999726dbc1cbdd3d9062234698eeae799ce84ce/pyconfluence/api.py#L21-L36
|
[
"def",
"catalogFactory",
"(",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"fn",
"=",
"lambda",
"member",
":",
"inspect",
".",
"isclass",
"(",
"member",
")",
"and",
"member",
".",
"__module__",
"==",
"__name__",
"catalogs",
"=",
"odict",
"(",
"inspect",
".",
"getmembers",
"(",
"sys",
".",
"modules",
"[",
"__name__",
"]",
",",
"fn",
")",
")",
"if",
"name",
"not",
"in",
"list",
"(",
"catalogs",
".",
"keys",
"(",
")",
")",
":",
"msg",
"=",
"\"%s not found in catalogs:\\n %s\"",
"%",
"(",
"name",
",",
"list",
"(",
"kernels",
".",
"keys",
"(",
")",
")",
")",
"logger",
".",
"error",
"(",
"msg",
")",
"msg",
"=",
"\"Unrecognized catalog: %s\"",
"%",
"name",
"raise",
"Exception",
"(",
"msg",
")",
"return",
"catalogs",
"[",
"name",
"]",
"(",
"*",
"*",
"kwargs",
")"
] |
Main function to be called from this module .
|
def rest ( url , req = "GET" , data = None ) : load_variables ( ) return _rest ( base_url + url , req , data )
| 2,975
|
https://github.com/FulcrumTechnologies/pyconfluence/blob/a999726dbc1cbdd3d9062234698eeae799ce84ce/pyconfluence/api.py#L39-L47
|
[
"def",
"edit",
"(",
"self",
",",
"image_id",
",",
"name",
"=",
"None",
",",
"note",
"=",
"None",
",",
"tag",
"=",
"None",
")",
":",
"obj",
"=",
"{",
"}",
"if",
"name",
":",
"obj",
"[",
"'name'",
"]",
"=",
"name",
"if",
"note",
":",
"obj",
"[",
"'note'",
"]",
"=",
"note",
"if",
"obj",
":",
"self",
".",
"vgbdtg",
".",
"editObject",
"(",
"obj",
",",
"id",
"=",
"image_id",
")",
"if",
"tag",
":",
"self",
".",
"vgbdtg",
".",
"setTags",
"(",
"str",
"(",
"tag",
")",
",",
"id",
"=",
"image_id",
")",
"return",
"bool",
"(",
"name",
"or",
"note",
"or",
"tag",
")"
] |
Send a rest rest request to the server .
|
def _rest ( url , req , data = None ) : if url . upper ( ) . startswith ( "HTTPS" ) : print ( "Secure connection required: Please use HTTPS or https" ) return "" req = req . upper ( ) if req != "GET" and req != "PUT" and req != "POST" and req != "DELETE" : return "" status , body = _api_action ( url , req , data ) if ( int ( status ) >= 200 and int ( status ) <= 226 ) : return body else : return body
| 2,976
|
https://github.com/FulcrumTechnologies/pyconfluence/blob/a999726dbc1cbdd3d9062234698eeae799ce84ce/pyconfluence/api.py#L50-L64
|
[
"def",
"get_correlation_table",
"(",
"self",
",",
"chain",
"=",
"0",
",",
"parameters",
"=",
"None",
",",
"caption",
"=",
"\"Parameter Correlations\"",
",",
"label",
"=",
"\"tab:parameter_correlations\"",
")",
":",
"parameters",
",",
"cor",
"=",
"self",
".",
"get_correlations",
"(",
"chain",
"=",
"chain",
",",
"parameters",
"=",
"parameters",
")",
"return",
"self",
".",
"_get_2d_latex_table",
"(",
"parameters",
",",
"cor",
",",
"caption",
",",
"label",
")"
] |
Take action based on what kind of request is needed .
|
def _api_action ( url , req , data = None ) : requisite_headers = { 'Accept' : 'application/json' , 'Content-Type' : 'application/json' } auth = ( user , token ) if req == "GET" : response = requests . get ( url , headers = requisite_headers , auth = auth ) elif req == "PUT" : response = requests . put ( url , headers = requisite_headers , auth = auth , data = data ) elif req == "POST" : response = requests . post ( url , headers = requisite_headers , auth = auth , data = data ) elif req == "DELETE" : response = requests . delete ( url , headers = requisite_headers , auth = auth ) return response . status_code , response . text
| 2,977
|
https://github.com/FulcrumTechnologies/pyconfluence/blob/a999726dbc1cbdd3d9062234698eeae799ce84ce/pyconfluence/api.py#L67-L84
|
[
"def",
"_sync_with_file",
"(",
"self",
")",
":",
"self",
".",
"_records",
"=",
"[",
"]",
"i",
"=",
"-",
"1",
"for",
"i",
",",
"line",
"in",
"self",
".",
"_enum_lines",
"(",
")",
":",
"self",
".",
"_records",
".",
"append",
"(",
"None",
")",
"self",
".",
"_last_synced_index",
"=",
"i"
] |
Return all the patterns for specific platform .
|
def _platform_patterns ( self , platform = 'generic' , compiled = False ) : patterns = self . _dict_compiled . get ( platform , None ) if compiled else self . _dict_text . get ( platform , None ) if patterns is None : raise KeyError ( "Unknown platform: {}" . format ( platform ) ) return patterns
| 2,978
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/patterns.py#L56-L61
|
[
"async",
"def",
"activate",
"(",
"cls",
",",
"access_key",
":",
"str",
")",
"->",
"dict",
":",
"q",
"=",
"'mutation($access_key: String!, $input: ModifyKeyPairInput!) {'",
"+",
"' modify_keypair(access_key: $access_key, props: $input) {'",
"' ok msg'",
"' }'",
"'}'",
"variables",
"=",
"{",
"'access_key'",
":",
"access_key",
",",
"'input'",
":",
"{",
"'is_active'",
":",
"True",
",",
"'is_admin'",
":",
"None",
",",
"'resource_policy'",
":",
"None",
",",
"'rate_limit'",
":",
"None",
",",
"}",
",",
"}",
"rqst",
"=",
"Request",
"(",
"cls",
".",
"session",
",",
"'POST'",
",",
"'/admin/graphql'",
")",
"rqst",
".",
"set_json",
"(",
"{",
"'query'",
":",
"q",
",",
"'variables'",
":",
"variables",
",",
"}",
")",
"async",
"with",
"rqst",
".",
"fetch",
"(",
")",
"as",
"resp",
":",
"data",
"=",
"await",
"resp",
".",
"json",
"(",
")",
"return",
"data",
"[",
"'modify_keypair'",
"]"
] |
Return the pattern defined by the key string specific to the platform .
|
def pattern ( self , platform , key , compiled = True ) : patterns = self . _platform_patterns ( platform , compiled = compiled ) pattern = patterns . get ( key , self . _platform_patterns ( compiled = compiled ) . get ( key , None ) ) if pattern is None : raise KeyError ( "Patterns database corrupted. Platform: {}, Key: {}" . format ( platform , key ) ) return pattern
| 2,979
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/patterns.py#L76-L90
|
[
"def",
"upload_document_fileobj",
"(",
"file_obj",
",",
"file_name",
",",
"session",
",",
"documents_resource",
",",
"log",
"=",
"None",
")",
":",
"try",
":",
"fields",
"=",
"documents_resource",
".",
"init_multipart_upload",
"(",
")",
"except",
"requests",
".",
"exceptions",
".",
"HTTPError",
"as",
"e",
":",
"raise_api_error",
"(",
"e",
".",
"response",
",",
"state",
"=",
"\"init\"",
")",
"except",
"requests",
".",
"exceptions",
".",
"ConnectionError",
":",
"raise_connectivity_error",
"(",
"file_name",
")",
"s3_upload",
"=",
"_s3_intermediate_upload",
"(",
"file_obj",
",",
"file_name",
",",
"fields",
",",
"session",
",",
"documents_resource",
".",
"_client",
".",
"_root_url",
"+",
"fields",
"[",
"\"callback_url\"",
"]",
",",
"# full callback url",
")",
"document_id",
"=",
"s3_upload",
".",
"get",
"(",
"\"document_id\"",
",",
"\"<UUID not yet assigned>\"",
")",
"logging",
".",
"info",
"(",
"\"{}: finished as document {}\"",
".",
"format",
"(",
"file_name",
",",
"document_id",
")",
")",
"return",
"document_id"
] |
Return the patter description .
|
def description ( self , platform , key ) : patterns = self . _dict_dscr . get ( platform , None ) description = patterns . get ( key , None ) return description
| 2,980
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/patterns.py#L92-L96
|
[
"def",
"_assign_numbers",
"(",
"self",
")",
":",
"first",
"=",
"self",
".",
"select_related",
"(",
"'point_of_sales'",
",",
"'receipt_type'",
")",
".",
"first",
"(",
")",
"next_num",
"=",
"Receipt",
".",
"objects",
".",
"fetch_last_receipt_number",
"(",
"first",
".",
"point_of_sales",
",",
"first",
".",
"receipt_type",
",",
")",
"+",
"1",
"for",
"receipt",
"in",
"self",
".",
"filter",
"(",
"receipt_number__isnull",
"=",
"True",
")",
":",
"# Atomically update receipt number",
"Receipt",
".",
"objects",
".",
"filter",
"(",
"pk",
"=",
"receipt",
".",
"id",
",",
"receipt_number__isnull",
"=",
"True",
",",
")",
".",
"update",
"(",
"receipt_number",
"=",
"next_num",
",",
")",
"next_num",
"+=",
"1"
] |
Return the platform name based on the prompt matching .
|
def platform ( self , with_prompt , platforms = None ) : if platforms is None : platforms = self . _dict [ 'generic' ] [ 'prompt_detection' ] for platform in platforms : pattern = self . pattern ( platform , 'prompt' ) result = re . search ( pattern , with_prompt ) if result : return platform return None
| 2,981
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/patterns.py#L98-L108
|
[
"def",
"clean",
"(",
"self",
",",
"timeout",
"=",
"60",
")",
":",
"self",
".",
"refresh",
"(",
")",
"tds",
"=",
"self",
"[",
"'maxTotalDataSizeMB'",
"]",
"ftp",
"=",
"self",
"[",
"'frozenTimePeriodInSecs'",
"]",
"was_disabled_initially",
"=",
"self",
".",
"disabled",
"try",
":",
"if",
"(",
"not",
"was_disabled_initially",
"and",
"self",
".",
"service",
".",
"splunk_version",
"<",
"(",
"5",
",",
")",
")",
":",
"# Need to disable the index first on Splunk 4.x,",
"# but it doesn't work to disable it on 5.0.",
"self",
".",
"disable",
"(",
")",
"self",
".",
"update",
"(",
"maxTotalDataSizeMB",
"=",
"1",
",",
"frozenTimePeriodInSecs",
"=",
"1",
")",
"self",
".",
"roll_hot_buckets",
"(",
")",
"# Wait until event count goes to 0.",
"start",
"=",
"datetime",
".",
"now",
"(",
")",
"diff",
"=",
"timedelta",
"(",
"seconds",
"=",
"timeout",
")",
"while",
"self",
".",
"content",
".",
"totalEventCount",
"!=",
"'0'",
"and",
"datetime",
".",
"now",
"(",
")",
"<",
"start",
"+",
"diff",
":",
"sleep",
"(",
"1",
")",
"self",
".",
"refresh",
"(",
")",
"if",
"self",
".",
"content",
".",
"totalEventCount",
"!=",
"'0'",
":",
"raise",
"OperationError",
"(",
"\"Cleaning index %s took longer than %s seconds; timing out.\"",
"%",
"(",
"self",
".",
"name",
",",
"timeout",
")",
")",
"finally",
":",
"# Restore original values",
"self",
".",
"update",
"(",
"maxTotalDataSizeMB",
"=",
"tds",
",",
"frozenTimePeriodInSecs",
"=",
"ftp",
")",
"if",
"(",
"not",
"was_disabled_initially",
"and",
"self",
".",
"service",
".",
"splunk_version",
"<",
"(",
"5",
",",
")",
")",
":",
"# Re-enable the index if it was originally enabled and we messed with it.",
"self",
".",
"enable",
"(",
")",
"return",
"self"
] |
Execute after connect .
|
def after_connect ( self ) : # TODO: check if this works. show_users = self . device . send ( "show users" , timeout = 120 ) result = re . search ( pattern_manager . pattern ( self . platform , 'connected_locally' ) , show_users ) if result : self . log ( 'Locally connected to Calvados. Exiting.' ) self . device . send ( 'exit' ) return True return False
| 2,982
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/drivers/Calvados.py#L47-L56
|
[
"def",
"ReleaseFileObject",
"(",
"self",
",",
"file_object",
")",
":",
"identifier",
",",
"cache_value",
"=",
"self",
".",
"_file_object_cache",
".",
"GetCacheValueByObject",
"(",
"file_object",
")",
"if",
"not",
"identifier",
":",
"raise",
"RuntimeError",
"(",
"'Object not cached.'",
")",
"if",
"not",
"cache_value",
":",
"raise",
"RuntimeError",
"(",
"'Invalid cache value.'",
")",
"self",
".",
"_file_object_cache",
".",
"ReleaseObject",
"(",
"identifier",
")",
"result",
"=",
"cache_value",
".",
"IsDereferenced",
"(",
")",
"if",
"result",
":",
"self",
".",
"_file_object_cache",
".",
"RemoveObject",
"(",
"identifier",
")",
"return",
"result"
] |
Return hostname information from the Unix host .
|
def get_hostname_text ( self ) : # FIXME: fix it, too complex logic try : hostname_text = self . device . send ( 'hostname' , timeout = 10 ) if hostname_text : self . device . hostname = hostname_text . splitlines ( ) [ 0 ] return hostname_text except CommandError : self . log ( "Non Unix jumphost type detected" ) return None
| 2,983
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/drivers/jumphost.py#L35-L45
|
[
"def",
"commit",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"model",
"is",
"None",
"or",
"self",
".",
"model",
".",
"json",
"is",
"None",
":",
"raise",
"MissingModelError",
"(",
")",
"with",
"db",
".",
"session",
".",
"begin_nested",
"(",
")",
":",
"before_record_update",
".",
"send",
"(",
"current_app",
".",
"_get_current_object",
"(",
")",
",",
"record",
"=",
"self",
")",
"self",
".",
"validate",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"model",
".",
"json",
"=",
"dict",
"(",
"self",
")",
"flag_modified",
"(",
"self",
".",
"model",
",",
"'json'",
")",
"db",
".",
"session",
".",
"merge",
"(",
"self",
".",
"model",
")",
"after_record_update",
".",
"send",
"(",
"current_app",
".",
"_get_current_object",
"(",
")",
",",
"record",
"=",
"self",
")",
"return",
"self"
] |
Find a config file if possible .
|
def _find_file ( f ) : if os . path . isabs ( f ) : return f else : for d in Config . _dirs : _f = os . path . join ( d , f ) if os . path . isfile ( _f ) : return _f raise FiggypyError ( "could not find configuration file {} in dirs {}" . format ( f , Config . _dirs ) )
| 2,984
|
https://github.com/theherk/figgypy/blob/324d1b281a8df20a26b92f42bf7fda0cca892116/figgypy/config.py#L82-L94
|
[
"def",
"delete_selection",
"(",
"self",
")",
":",
"selection",
"=",
"self",
".",
"selection",
"if",
"selection",
"is",
"None",
":",
"return",
"if",
"isinstance",
"(",
"selection",
",",
"ArrowWidget",
")",
":",
"self",
".",
"mainscreen",
".",
"boardview",
".",
"board",
".",
"rm_arrow",
"(",
"selection",
".",
"origin",
".",
"name",
",",
"selection",
".",
"destination",
".",
"name",
")",
"selection",
".",
"portal",
".",
"delete",
"(",
")",
"elif",
"isinstance",
"(",
"selection",
",",
"Spot",
")",
":",
"self",
".",
"mainscreen",
".",
"boardview",
".",
"board",
".",
"rm_spot",
"(",
"selection",
".",
"name",
")",
"selection",
".",
"proxy",
".",
"delete",
"(",
")",
"else",
":",
"assert",
"isinstance",
"(",
"selection",
",",
"Pawn",
")",
"self",
".",
"mainscreen",
".",
"boardview",
".",
"board",
".",
"rm_pawn",
"(",
"selection",
".",
"name",
")",
"selection",
".",
"proxy",
".",
"delete",
"(",
")",
"self",
".",
"selection",
"=",
"None"
] |
Get values from config file
|
def _load_file ( self , f ) : try : with open ( f , 'r' ) as _fo : _seria_in = seria . load ( _fo ) _y = _seria_in . dump ( 'yaml' ) except IOError : raise FiggypyError ( "could not open configuration file" ) self . values . update ( yaml . load ( _y ) )
| 2,985
|
https://github.com/theherk/figgypy/blob/324d1b281a8df20a26b92f42bf7fda0cca892116/figgypy/config.py#L96-L104
|
[
"def",
"Nu_vertical_cylinder",
"(",
"Pr",
",",
"Gr",
",",
"L",
"=",
"None",
",",
"D",
"=",
"None",
",",
"Method",
"=",
"None",
",",
"AvailableMethods",
"=",
"False",
")",
":",
"def",
"list_methods",
"(",
")",
":",
"methods",
"=",
"[",
"]",
"for",
"key",
",",
"values",
"in",
"vertical_cylinder_correlations",
".",
"items",
"(",
")",
":",
"if",
"values",
"[",
"4",
"]",
"or",
"all",
"(",
"(",
"L",
",",
"D",
")",
")",
":",
"methods",
".",
"append",
"(",
"key",
")",
"if",
"'Popiel & Churchill'",
"in",
"methods",
":",
"methods",
".",
"remove",
"(",
"'Popiel & Churchill'",
")",
"methods",
".",
"insert",
"(",
"0",
",",
"'Popiel & Churchill'",
")",
"elif",
"'McAdams, Weiss & Saunders'",
"in",
"methods",
":",
"methods",
".",
"remove",
"(",
"'McAdams, Weiss & Saunders'",
")",
"methods",
".",
"insert",
"(",
"0",
",",
"'McAdams, Weiss & Saunders'",
")",
"return",
"methods",
"if",
"AvailableMethods",
":",
"return",
"list_methods",
"(",
")",
"if",
"not",
"Method",
":",
"Method",
"=",
"list_methods",
"(",
")",
"[",
"0",
"]",
"if",
"Method",
"in",
"vertical_cylinder_correlations",
":",
"if",
"vertical_cylinder_correlations",
"[",
"Method",
"]",
"[",
"4",
"]",
":",
"return",
"vertical_cylinder_correlations",
"[",
"Method",
"]",
"[",
"0",
"]",
"(",
"Pr",
"=",
"Pr",
",",
"Gr",
"=",
"Gr",
")",
"else",
":",
"return",
"vertical_cylinder_correlations",
"[",
"Method",
"]",
"[",
"0",
"]",
"(",
"Pr",
"=",
"Pr",
",",
"Gr",
"=",
"Gr",
",",
"L",
"=",
"L",
",",
"D",
"=",
"D",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"Correlation name not recognized; see the \"",
"\"documentation for the available options.\"",
")"
] |
Make setup easier by providing a constructor method .
|
def setup ( self , config_file = None , aws_config = None , gpg_config = None , decrypt_gpg = True , decrypt_kms = True ) : if aws_config is not None : self . aws_config = aws_config if gpg_config is not None : self . gpg_config = gpg_config if decrypt_kms is not None : self . decrypt_kms = decrypt_kms if decrypt_gpg is not None : self . decrypt_gpg = decrypt_gpg # Again, load the file last so that it can rely on other properties. if config_file is not None : self . config_file = config_file return self
| 2,986
|
https://github.com/theherk/figgypy/blob/324d1b281a8df20a26b92f42bf7fda0cca892116/figgypy/config.py#L199-L227
|
[
"def",
"get_requests_for_local_unit",
"(",
"relation_name",
"=",
"None",
")",
":",
"local_name",
"=",
"local_unit",
"(",
")",
".",
"replace",
"(",
"'/'",
",",
"'_'",
")",
"raw_certs_key",
"=",
"'{}.processed_requests'",
".",
"format",
"(",
"local_name",
")",
"relation_name",
"=",
"relation_name",
"or",
"'certificates'",
"bundles",
"=",
"[",
"]",
"for",
"rid",
"in",
"relation_ids",
"(",
"relation_name",
")",
":",
"for",
"unit",
"in",
"related_units",
"(",
"rid",
")",
":",
"data",
"=",
"relation_get",
"(",
"rid",
"=",
"rid",
",",
"unit",
"=",
"unit",
")",
"if",
"data",
".",
"get",
"(",
"raw_certs_key",
")",
":",
"bundles",
".",
"append",
"(",
"{",
"'ca'",
":",
"data",
"[",
"'ca'",
"]",
",",
"'chain'",
":",
"data",
".",
"get",
"(",
"'chain'",
")",
",",
"'certs'",
":",
"json",
".",
"loads",
"(",
"data",
"[",
"raw_certs_key",
"]",
")",
"}",
")",
"return",
"bundles"
] |
Authenticate using the Console Server protocol specific FSM .
|
def authenticate ( self , driver ) : # 0 1 2 3 events = [ driver . username_re , driver . password_re , self . device . prompt_re , driver . rommon_re , # 4 5 6 7 8 driver . unable_to_connect_re , driver . authentication_error_re , pexpect . TIMEOUT , pexpect . EOF ] transitions = [ ( driver . username_re , [ 0 ] , 1 , partial ( a_send_username , self . username ) , 10 ) , ( driver . username_re , [ 1 ] , 1 , None , 10 ) , ( driver . password_re , [ 0 , 1 ] , 2 , partial ( a_send_password , self . _acquire_password ( ) ) , _C [ 'first_prompt_timeout' ] ) , ( driver . username_re , [ 2 ] , - 1 , a_authentication_error , 0 ) , ( driver . password_re , [ 2 ] , - 1 , a_authentication_error , 0 ) , ( driver . authentication_error_re , [ 1 , 2 ] , - 1 , a_authentication_error , 0 ) , ( self . device . prompt_re , [ 0 , 1 , 2 ] , - 1 , None , 0 ) , ( driver . rommon_re , [ 0 ] , - 1 , partial ( a_send , "\r\n" ) , 0 ) , ( pexpect . TIMEOUT , [ 0 ] , 1 , partial ( a_send , "\r\n" ) , 10 ) , ( pexpect . TIMEOUT , [ 2 ] , - 1 , None , 0 ) , ( pexpect . TIMEOUT , [ 3 , 7 ] , - 1 , ConnectionTimeoutError ( "Connection Timeout" , self . hostname ) , 0 ) , ( driver . unable_to_connect_re , [ 0 , 1 , 2 ] , - 1 , a_unable_to_connect , 0 ) , ] self . log ( "EXPECTED_PROMPT={}" . format ( pattern_to_str ( self . device . prompt_re ) ) ) fsm = FSM ( "CONSOLE-SERVER-AUTH" , self . device , events , transitions , timeout = _C [ 'connect_timeout' ] , init_pattern = self . last_pattern ) return fsm . run ( )
| 2,987
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/protocols/console.py#L70-L95
|
[
"def",
"extract",
"(",
"self",
",",
"disk",
",",
"files",
",",
"path",
"=",
"'.'",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Extracting files.\"",
")",
"extracted_files",
",",
"failed",
"=",
"self",
".",
"_extract_files",
"(",
"disk",
",",
"files",
",",
"path",
")",
"return",
"{",
"'extracted_files'",
":",
"[",
"f",
"for",
"f",
"in",
"extracted_files",
".",
"keys",
"(",
")",
"]",
",",
"'extraction_errors'",
":",
"[",
"f",
"for",
"f",
"in",
"failed",
".",
"keys",
"(",
")",
"]",
"}"
] |
Pass the call to the attribute called attribute_name for every method listed in method_names .
|
def delegate ( attribute_name , method_names ) : # hack for python 2.7 as nonlocal is not available info = { 'attribute' : attribute_name , 'methods' : method_names } def decorator ( cls ) : """Decorate class.""" attribute = info [ 'attribute' ] if attribute . startswith ( "__" ) : attribute = "_" + cls . __name__ + attribute for name in info [ 'methods' ] : setattr ( cls , name , eval ( "lambda self, *a, **kw: " "self.{0}.{1}(*a, **kw)" . format ( attribute , name ) ) ) return cls return decorator
| 2,988
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/utils.py#L12-L29
|
[
"def",
"write_asc_file",
"(",
"filename",
",",
"data",
",",
"xsize",
",",
"ysize",
",",
"geotransform",
",",
"nodata_value",
")",
":",
"UtilClass",
".",
"mkdir",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"FileClass",
".",
"get_file_fullpath",
"(",
"filename",
")",
")",
")",
"header",
"=",
"'NCOLS %d\\n'",
"'NROWS %d\\n'",
"'XLLCENTER %f\\n'",
"'YLLCENTER %f\\n'",
"'CELLSIZE %f\\n'",
"'NODATA_VALUE %f'",
"%",
"(",
"xsize",
",",
"ysize",
",",
"geotransform",
"[",
"0",
"]",
"+",
"0.5",
"*",
"geotransform",
"[",
"1",
"]",
",",
"geotransform",
"[",
"3",
"]",
"-",
"(",
"ysize",
"-",
"0.5",
")",
"*",
"geotransform",
"[",
"1",
"]",
",",
"geotransform",
"[",
"1",
"]",
",",
"nodata_value",
")",
"with",
"open",
"(",
"filename",
",",
"'w'",
",",
"encoding",
"=",
"'utf-8'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"header",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"ysize",
")",
":",
"for",
"j",
"in",
"range",
"(",
"0",
",",
"xsize",
")",
":",
"f",
".",
"write",
"(",
"'%s\\t'",
"%",
"repr",
"(",
"data",
"[",
"i",
"]",
"[",
"j",
"]",
")",
")",
"f",
".",
"write",
"(",
"'\\n'",
")",
"f",
".",
"close",
"(",
")"
] |
Convert regex pattern to string .
|
def pattern_to_str ( pattern ) : if isinstance ( pattern , str ) : return repr ( pattern ) else : return repr ( pattern . pattern ) if pattern else None
| 2,989
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/utils.py#L85-L96
|
[
"def",
"update",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"_md",
".",
"update",
"(",
"data",
")",
"bufpos",
"=",
"self",
".",
"_nbytes",
"&",
"63",
"self",
".",
"_nbytes",
"+=",
"len",
"(",
"data",
")",
"if",
"self",
".",
"_rarbug",
"and",
"len",
"(",
"data",
")",
">",
"64",
":",
"dpos",
"=",
"self",
".",
"block_size",
"-",
"bufpos",
"while",
"dpos",
"+",
"self",
".",
"block_size",
"<=",
"len",
"(",
"data",
")",
":",
"self",
".",
"_corrupt",
"(",
"data",
",",
"dpos",
")",
"dpos",
"+=",
"self",
".",
"block_size"
] |
Calculate the Levenshtein distance between string a and b .
|
def levenshtein_distance ( str_a , str_b ) : len_a , len_b = len ( str_a ) , len ( str_b ) if len_a > len_b : str_a , str_b = str_b , str_a len_a , len_b = len_b , len_a current = range ( len_a + 1 ) for i in range ( 1 , len_b + 1 ) : previous , current = current , [ i ] + [ 0 ] * len_a for j in range ( 1 , len_a + 1 ) : add , delete = previous [ j ] + 1 , current [ j - 1 ] + 1 change = previous [ j - 1 ] if str_a [ j - 1 ] != str_b [ i - 1 ] : change += + 1 current [ j ] = min ( add , delete , change ) return current [ len_a ]
| 2,990
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/utils.py#L99-L119
|
[
"def",
"_ParsePage",
"(",
"self",
",",
"parser_mediator",
",",
"file_offset",
",",
"page_data",
")",
":",
"page_header_map",
"=",
"self",
".",
"_GetDataTypeMap",
"(",
"'binarycookies_page_header'",
")",
"try",
":",
"page_header",
"=",
"self",
".",
"_ReadStructureFromByteStream",
"(",
"page_data",
",",
"file_offset",
",",
"page_header_map",
")",
"except",
"(",
"ValueError",
",",
"errors",
".",
"ParseError",
")",
"as",
"exception",
":",
"raise",
"errors",
".",
"ParseError",
"(",
"(",
"'Unable to map page header data at offset: 0x{0:08x} with error: '",
"'{1!s}'",
")",
".",
"format",
"(",
"file_offset",
",",
"exception",
")",
")",
"for",
"record_offset",
"in",
"page_header",
".",
"offsets",
":",
"if",
"parser_mediator",
".",
"abort",
":",
"break",
"self",
".",
"_ParseRecord",
"(",
"parser_mediator",
",",
"page_data",
",",
"record_offset",
")"
] |
Parse the inventory text and return udi dict .
|
def parse_inventory ( inventory_output = None ) : udi = { "name" : "" , "description" : "" , "pid" : "" , "vid" : "" , "sn" : "" } if inventory_output is None : return udi # find the record with chassis text in name or descr capture_next = False chassis_udi_text = None for line in inventory_output . split ( '\n' ) : lc_line = line . lower ( ) if ( 'chassis' in lc_line or 'switch system' in lc_line or 'rack' in lc_line ) and 'name' in lc_line and 'descr' : capture_next = True chassis_udi_text = line continue if capture_next : inventory_output = chassis_udi_text + "\n" + line break match = re . search ( r"(?i)NAME: (?P<name>.*?),? (?i)DESCR" , inventory_output , re . MULTILINE ) if match : udi [ 'name' ] = match . group ( 'name' ) . strip ( '" ,' ) match = re . search ( r"(?i)DESCR: (?P<description>.*)" , inventory_output , re . MULTILINE ) if match : udi [ 'description' ] = match . group ( 'description' ) . strip ( '" ' ) match = re . search ( r"(?i)PID: (?P<pid>.*?),? " , inventory_output , re . MULTILINE ) if match : udi [ 'pid' ] = match . group ( 'pid' ) match = re . search ( r"(?i)VID: (?P<vid>.*?),? " , inventory_output , re . MULTILINE ) if match : udi [ 'vid' ] = match . group ( 'vid' ) match = re . search ( r"(?i)SN: (?P<sn>.*)" , inventory_output , re . MULTILINE ) if match : udi [ 'sn' ] = match . group ( 'sn' ) . strip ( ) return udi
| 2,991
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/utils.py#L122-L165
|
[
"def",
"close_handle",
"(",
"self",
")",
":",
"try",
":",
"if",
"hasattr",
"(",
"self",
".",
"hFile",
",",
"'close'",
")",
":",
"self",
".",
"hFile",
".",
"close",
"(",
")",
"elif",
"self",
".",
"hFile",
"not",
"in",
"(",
"None",
",",
"win32",
".",
"INVALID_HANDLE_VALUE",
")",
":",
"win32",
".",
"CloseHandle",
"(",
"self",
".",
"hFile",
")",
"finally",
":",
"self",
".",
"hFile",
"=",
"None"
] |
Overload urls and make list of lists of urls .
|
def normalize_urls ( urls ) : _urls = [ ] if isinstance ( urls , list ) : if urls : if isinstance ( urls [ 0 ] , list ) : # multiple connections (list of the lists) _urls = urls elif isinstance ( urls [ 0 ] , str ) : # single connections (make it list of the lists) _urls = [ urls ] else : raise RuntimeError ( "No target host url provided." ) elif isinstance ( urls , str ) : _urls = [ [ urls ] ] return _urls
| 2,992
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/utils.py#L233-L248
|
[
"def",
"create",
"(",
"cls",
",",
"destination",
")",
":",
"mdb_gz_b64",
"=",
"\"\"\"\\\n H4sICIenn1gC/25ldzIwMDMubWRiAO2de2wcRx3Hf7O7Pt/d3u6eLyEtVaOaqg+EkjQvuVVDwa9a\n jWXHdZxQQlCJ7fOrfp3OTpqkhVxTItFWIhVQVFBRVNIKRaColVpAUKGKRwwFqUAhKiBIpUaoVWP+\n qKgIIHL8Znb39u72znWJiWP3+9l473fzm/nNY3cdf2fmbBJEPdO9E+nebLq+fWC6vrWZOImen9D7\n 9sR+vPPNE0PZxo/TE5879mj+yNc3/OzAD2bXv3DmV9/o/8PZnxxr+/fDL2w79ulzN7e+/sS/zvzz\n w3+N1z28p3PTfQ3nfn/m2YmeFS2no89uWnvqwO5HUvd/5Phr938tes3j/zm5+qT41J8/P/iZx87/\n +qHrjgyduubG1t/+7eWB2XztTNuT+1clZt9c2/e7HRGizevWEwAAAAAAAACAhUEIwvE+PoRIO8K7\n FzT6obPPwTMBAAAAAAAAAABcfpzPXwya+Ispo1xlEO2KEEX9eaGyWnrqyKQ60tQ0AcNZRcR1RYuy\n +XZCxoqRzmaMI6cKGRJuJVrIEZUOQ9UrHStUYpyzKkdNmSPFDkM6aguhXMdVHCMuHXE2Suu4IFQJ\n l6CErNWUDouDlbdKOZIcrKLD4S5WdNhqIEodqlVaofKgVTHpiBQ6uLG0uaKsuYbf3IS8BmV1qFAm\n j1Z5Hbp06GWDKC+DTS00SRN8DFA/TXNfW6mXX3upj7+mOHWllzLAObN8du0gdSdlKO3ZcWqjMbaH\n uOQqtidViRF+P0HbOH2c3xm0lfMb1EH7uHZ5vp32c+ks+5PqfSeXS9NejjTAvZQpd7J3kuuJFqLE\n qYvuVa3Ocqk7OVXWNMFxZPRVtJ1zSXuCBrlkh+rjEF1Zlt5Dw6qN0xx5Bx3gGgbowVo56EIjkc9T\n xX9Jdd+5PKDOD6q3VQvwv7qiZ8st419cdYHlo6iuriF8X4HA590AsodXhvrsj0yMDPnAuI+ZvOrq\n 1o7K51Hdy7a8cdXNm5AedbfG5W3j3lOybxFZKb6zAgAAAAAAsNzQxAlbvnYJV3VcUU3/S2luBIKF\n ha+IlWp+wxW4IiRXRSXxKeNU1eOxUuUbSOIINbEM7WT506ZE3LASgCOeYJWCMcnCsI/u8eSsFEYR\n lnlbWa6+u0jTYqSkvuQL9G5CLFwTRBMAAAAAAAAAgMtW/79lyVdLKxW7oqDF3bXOniib0UD/m/xq\n loWqvFwt3DX/mrLNALIu3V35NkpK1JDmL+2XOmr9pf1gKiFY4I672wc0mveaf6zaenyKmljPT6t5\n hT7a6y13y0XqjFpwneJjRC0oRwvL3eUL2fHCcuyGIntjhTkDuZCd5Vc5j+HNUMyx+myYcpHW5YG5\n ZijUdbg2VFu4ZzzcHFM3seQLAAAAAAAAAMtc//9S6cm1emX97ytK1v81rHelhtfVfAFnseZXRdV9\n Ad7+dhGS5kbl3eqe/K8pU/nnYwX5X2VeoLbCZwHi7txD6aTELabnoLJ5AfPFC8JmFd3Pun+MlfM4\n q/846/4s62i5+8Dmc7EvSVN0UG2tL00p1uPXqZTt/G5QqX+5lbufz+mSctVzFce6upBrTG3Fd+cn\n pmiYrUyw8+GNfL4hn8/k83qZrVlyGzgPeqbhjcOqx7KMEZRpU/MPQ+rsldEtuYm8vExkznoMS+6b\n KC5TZRt8wVf4xEkFX4V5D/X2vYz1/EcR8yMAAAAAAACAJY0Qf/d3vLPUlb//b4Nzzv6W3Wevtl+1\n vmxts2LWTxOHErcm3jGfMUfNG0yMGQAAAAAAeJ/8rLwAMXIYRgCARFv8IIaYtKpGqCdqlN/2kupD\n /ob67qXhsi0lDh2Vp6728faO9tHuUflfWJ1wE0e6724f35XuG71r16Dr0FwH573by6rKi0N7RveN\n 
tnd6aTVBWrpjd3fnuJtsBMnDk90ju7zckSA5XGGtdGrK2dWhUnRcMgAAAAAAAAD4v2CIV6vqf82I\n Jusbcwsy7wkWSf/n1JQNq/Oc+uQGq/ecmsphYZ6Tn6XwRLjwxb7mTxDoakLgURUFshwAAAAAAAAA\n ljpCrHZ8W/f2/2NUAAAAAAAAAAAAhXH5RLm4IIbotqot7hbW/0MGWCp46/+pgpHwjZS3IyAlfMPy\n tgakNN+wfcPxNgukdN9I+kadt30gZfhGjW+s8I2V3s6CVNTbWZCK+Eatb3zAN1Z5mw5SMd+I+wZ+\n +QQAAAAAAAAA/K8IcdT27Zqi3/+HkQEAAAAAAAAAsGgkMQQLjSHqbQPDAAAAAAAAAAAALGuw/g8A\n AAAAAAAA4DJUqwsQI7cQDWlcLiMq1/9rcGMBAAAAAAAAAADLGuh/AAAAAAAAAAAA+h8AAAAAAAAA\n AABLHyHusDTPjtLzTtoxnRftUftqe8YatDA+AAAAAAAAAPDeqJN/KVt+et0R9PYnzz7W8PrZRv+V\n HblO6qEDNEXbaYDGqJemaYQmaYJThtnK8Gvzb1opfDRTPZmUlxUY86qgm/ZyFVkOOqCC3kLhoyEI\n qs8raBO10O0q3EYKH+uDcNq8wnVRH93D7evnYZhHG5kkB3a0OYO2ctCWV9ZR+FhT0l2HCzl6xVBz\n XZyPUvi4taTjcwRuVUF7uYW9HMy9MJspfGwMAoo5A+5Qwca8UHN2WogeU/fu0ito1vmjM+M85zzp\n fNG5zxl2djrNzk3O9+0m+yWrx2q0fpH4buJ4Yk3ig4lvmkfxx9gBAAAAAAC4OAylQfJ5h5pfSVCc\n f853gqSmWPSZux6xjUznltH2HT/flNu7++0NZ7/07cg/vnPbVu30y6d/NLvlabPh+j81v/Xc5g9l\n 1h2f+epn9+VPdN90OHHvU50fm94y/ZXvWQ/tP/yJG/NH3llz8A79tlNPG72DHSePHdzz2s3XPzVj\n vzSUvSHjVys1Rv5CSUv8pEvcEqkbV/KX35JaQ+npikmRS9o4rtYIt8RYnJa4Ou6SV6stTm+l7rcX\n q9qSy+23pCVIcgV/SZKuJj5CSRc4Y/PpkiesLJcI53J37NvFuQzv4peGL0/SypP+C+45xVAAMAEA\n \"\"\"",
"pristine",
"=",
"StringIO",
"(",
")",
"pristine",
".",
"write",
"(",
"base64",
".",
"b64decode",
"(",
"mdb_gz_b64",
")",
")",
"pristine",
".",
"seek",
"(",
"0",
")",
"pristine",
"=",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"pristine",
",",
"mode",
"=",
"'rb'",
")",
"with",
"open",
"(",
"destination",
",",
"'wb'",
")",
"as",
"handle",
":",
"shutil",
".",
"copyfileobj",
"(",
"pristine",
",",
"handle",
")",
"return",
"cls",
"(",
"destination",
")"
] |
Read yaml file and return the dict .
|
def yaml_file_to_dict ( script_name , path = None ) : def load_yaml ( file_path ) : """Load YAML file from full file path and return dict.""" with open ( file_path , 'r' ) as yamlfile : try : dictionary = yaml . load ( yamlfile ) except yaml . YAMLError : return { } return dictionary def merge ( user , default ) : """Merge two dicts.""" if isinstance ( user , dict ) and isinstance ( default , dict ) : for k , v in default . iteritems ( ) : if k not in user : user [ k ] = v else : user [ k ] = merge ( user [ k ] , v ) return user if path is None : path = os . path . abspath ( '.' ) config_file_path = os . path . join ( path , script_name + '.yaml' ) if not os . path . exists ( config_file_path ) : raise RuntimeError ( 'Config file does not exist: {}' . format ( config_file_path ) ) default_dict = load_yaml ( config_file_path ) user_config_file_path = os . path . join ( os . path . expanduser ( '~' ) , '.condoor' , os . path . basename ( script_name ) + '.yaml' ) user_config_file_path = os . getenv ( 'CONDOOR_' + os . path . basename ( script_name ) . upper ( ) , user_config_file_path ) if os . path . exists ( user_config_file_path ) : user_dict = load_yaml ( user_config_file_path ) if user_dict : default_dict = merge ( user_dict , default_dict ) return default_dict
| 2,993
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/utils.py#L280-L323
|
[
"def",
"user_deleted_from_site_event",
"(",
"event",
")",
":",
"userid",
"=",
"event",
".",
"principal",
"catalog",
"=",
"api",
".",
"portal",
".",
"get_tool",
"(",
"'portal_catalog'",
")",
"query",
"=",
"{",
"'object_provides'",
":",
"WORKSPACE_INTERFACE",
"}",
"query",
"[",
"'workspace_members'",
"]",
"=",
"userid",
"workspaces",
"=",
"[",
"IWorkspace",
"(",
"b",
".",
"_unrestrictedGetObject",
"(",
")",
")",
"for",
"b",
"in",
"catalog",
".",
"unrestrictedSearchResults",
"(",
"query",
")",
"]",
"for",
"workspace",
"in",
"workspaces",
":",
"workspace",
".",
"remove_from_team",
"(",
"userid",
")"
] |
Override the standard write method to filter the content .
|
def write ( self , text ) : index = text . find ( '\n' ) if index == - 1 : self . _buffer = self . _buffer + text else : self . _buffer = self . _buffer + text [ : index + 1 ] if self . _pattern : # pattern already compiled no need to check result = re . search ( self . _pattern , self . _buffer ) if result : for group in result . groups ( ) : if group : self . _buffer = self . _buffer . replace ( group , "***" ) self . _file . write ( self . _buffer ) self . _file . flush ( ) self . _buffer = text [ index + 1 : ]
| 2,994
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/utils.py#L193-L209
|
[
"def",
"_get_cur_remotes",
"(",
"path",
")",
":",
"cur_remotes",
"=",
"set",
"(",
"[",
"]",
")",
"if",
"isinstance",
"(",
"path",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"for",
"v",
"in",
"path",
":",
"cur_remotes",
"|=",
"_get_cur_remotes",
"(",
"v",
")",
"elif",
"isinstance",
"(",
"path",
",",
"dict",
")",
":",
"for",
"v",
"in",
"path",
".",
"values",
"(",
")",
":",
"cur_remotes",
"|=",
"_get_cur_remotes",
"(",
"v",
")",
"elif",
"path",
"and",
"isinstance",
"(",
"path",
",",
"six",
".",
"string_types",
")",
":",
"if",
"path",
".",
"startswith",
"(",
"tuple",
"(",
"INTEGRATION_MAP",
".",
"keys",
"(",
")",
")",
")",
":",
"cur_remotes",
".",
"add",
"(",
"INTEGRATION_MAP",
".",
"get",
"(",
"path",
".",
"split",
"(",
"\":\"",
")",
"[",
"0",
"]",
"+",
"\":\"",
")",
")",
"return",
"cur_remotes"
] |
Start profiler .
|
def start ( builtins = False , profile_threads = True ) : if profile_threads : threading . setprofile ( _callback ) _yappi . start ( builtins , profile_threads )
| 2,995
|
https://github.com/ajdavis/GreenletProfiler/blob/700349864a4f368a8a73a2a60f048c2e818d7cea/_vendorized_yappi/yappi.py#L700-L706
|
[
"def",
"_add_dependency",
"(",
"self",
",",
"dependency",
",",
"var_name",
"=",
"None",
")",
":",
"if",
"var_name",
"is",
"None",
":",
"var_name",
"=",
"next",
"(",
"self",
".",
"temp_var_names",
")",
"# Don't add duplicate dependencies",
"if",
"(",
"dependency",
",",
"var_name",
")",
"not",
"in",
"self",
".",
"dependencies",
":",
"self",
".",
"dependencies",
".",
"append",
"(",
"(",
"dependency",
",",
"var_name",
")",
")",
"return",
"var_name"
] |
Sets the internal clock type for timing . Profiler shall not have any previous stats . Otherwise an exception is thrown .
|
def set_clock_type ( type ) : type = type . upper ( ) if type not in CLOCK_TYPES : raise YappiError ( "Invalid clock type:%s" % ( type ) ) _yappi . set_clock_type ( CLOCK_TYPES [ type ] )
| 2,996
|
https://github.com/ajdavis/GreenletProfiler/blob/700349864a4f368a8a73a2a60f048c2e818d7cea/_vendorized_yappi/yappi.py#L755-L764
|
[
"def",
"push",
"(",
"package",
",",
"is_public",
"=",
"False",
",",
"is_team",
"=",
"False",
",",
"reupload",
"=",
"False",
",",
"hash",
"=",
"None",
")",
":",
"team",
",",
"owner",
",",
"pkg",
",",
"subpath",
"=",
"parse_package",
"(",
"package",
",",
"allow_subpath",
"=",
"True",
")",
"_check_team_id",
"(",
"team",
")",
"session",
"=",
"_get_session",
"(",
"team",
")",
"store",
",",
"pkgroot",
"=",
"PackageStore",
".",
"find_package",
"(",
"team",
",",
"owner",
",",
"pkg",
",",
"pkghash",
"=",
"hash",
")",
"if",
"pkgroot",
"is",
"None",
":",
"raise",
"CommandException",
"(",
"\"Package {package} not found.\"",
".",
"format",
"(",
"package",
"=",
"package",
")",
")",
"pkghash",
"=",
"hash_contents",
"(",
"pkgroot",
")",
"if",
"hash",
"is",
"not",
"None",
":",
"assert",
"pkghash",
"==",
"hash",
"contents",
"=",
"pkgroot",
"for",
"component",
"in",
"subpath",
":",
"try",
":",
"contents",
"=",
"contents",
".",
"children",
"[",
"component",
"]",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
":",
"raise",
"CommandException",
"(",
"\"Invalid subpath: %r\"",
"%",
"component",
")",
"def",
"_push_package",
"(",
"dry_run",
"=",
"False",
",",
"sizes",
"=",
"dict",
"(",
")",
")",
":",
"data",
"=",
"json",
".",
"dumps",
"(",
"dict",
"(",
"dry_run",
"=",
"dry_run",
",",
"is_public",
"=",
"is_public",
",",
"is_team",
"=",
"is_team",
",",
"contents",
"=",
"contents",
",",
"description",
"=",
"\"\"",
",",
"# TODO",
"sizes",
"=",
"sizes",
")",
",",
"default",
"=",
"encode_node",
")",
"compressed_data",
"=",
"gzip_compress",
"(",
"data",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"if",
"subpath",
":",
"return",
"session",
".",
"post",
"(",
"\"{url}/api/package_update/{owner}/{pkg}/{subpath}\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
",",
"subpath",
"=",
"'/'",
".",
"join",
"(",
"subpath",
")",
")",
",",
"data",
"=",
"compressed_data",
",",
"headers",
"=",
"{",
"'Content-Encoding'",
":",
"'gzip'",
"}",
")",
"else",
":",
"return",
"session",
".",
"put",
"(",
"\"{url}/api/package/{owner}/{pkg}/{hash}\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
",",
"hash",
"=",
"pkghash",
")",
",",
"data",
"=",
"compressed_data",
",",
"headers",
"=",
"{",
"'Content-Encoding'",
":",
"'gzip'",
"}",
")",
"print",
"(",
"\"Fetching upload URLs from the registry...\"",
")",
"resp",
"=",
"_push_package",
"(",
"dry_run",
"=",
"True",
")",
"obj_urls",
"=",
"resp",
".",
"json",
"(",
")",
"[",
"'upload_urls'",
"]",
"assert",
"set",
"(",
"obj_urls",
")",
"==",
"set",
"(",
"find_object_hashes",
"(",
"contents",
")",
")",
"obj_sizes",
"=",
"{",
"obj_hash",
":",
"os",
".",
"path",
".",
"getsize",
"(",
"store",
".",
"object_path",
"(",
"obj_hash",
")",
")",
"for",
"obj_hash",
"in",
"obj_urls",
"}",
"success",
"=",
"upload_fragments",
"(",
"store",
",",
"obj_urls",
",",
"obj_sizes",
",",
"reupload",
"=",
"reupload",
")",
"if",
"not",
"success",
":",
"raise",
"CommandException",
"(",
"\"Failed to upload fragments\"",
")",
"print",
"(",
"\"Uploading package metadata...\"",
")",
"resp",
"=",
"_push_package",
"(",
"sizes",
"=",
"obj_sizes",
")",
"package_url",
"=",
"resp",
".",
"json",
"(",
")",
"[",
"'package_url'",
"]",
"if",
"not",
"subpath",
":",
"# Update the latest tag.",
"print",
"(",
"\"Updating the 'latest' tag...\"",
")",
"session",
".",
"put",
"(",
"\"{url}/api/tag/{owner}/{pkg}/{tag}\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
",",
"tag",
"=",
"LATEST_TAG",
")",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"dict",
"(",
"hash",
"=",
"pkghash",
")",
")",
")",
"print",
"(",
"\"Push complete. %s is live:\\n%s\"",
"%",
"(",
"package",
",",
"package_url",
")",
")"
] |
Reads a reply from the server .
|
async def read_reply ( self ) : code = 500 messages = [ ] go_on = True while go_on : try : line = await self . readline ( ) except ValueError as e : # ValueError is raised when limit is reached before we could # get an entire line. # We return what we got with a 500 code and we stop to read # the reply to avoid being flooded. code = 500 go_on = False else : try : code = int ( line [ : 3 ] ) except ValueError as e : # We either: # - Got an empty line (connection is probably down), # - Got a line without a valid return code. # In both case, it shouldn't happen, hence: raise ConnectionResetError ( "Connection lost." ) from e else : # Check is we have a multiline response: go_on = line [ 3 : 4 ] == b"-" message = line [ 4 : ] . strip ( b" \t\r\n" ) . decode ( "ascii" ) messages . append ( message ) full_message = "\n" . join ( messages ) return code , full_message
| 2,997
|
https://github.com/hwmrocker/smtplibaio/blob/84ce8e45b7e706476739d0efcb416c18ecabbbb6/smtplibaio/streams.py#L31-L79
|
[
"def",
"rewind",
"(",
"self",
",",
"count",
")",
":",
"if",
"count",
">",
"self",
".",
"_index",
":",
"# pragma: no cover",
"raise",
"ValueError",
"(",
"\"Can't rewind past beginning!\"",
")",
"self",
".",
"_index",
"-=",
"count"
] |
Build HopInfo object from url .
|
def make_hop_info_from_url ( url , verify_reachability = None ) : parsed = urlparse ( url ) username = None if parsed . username is None else unquote ( parsed . username ) # It's None if not exists password = None if parsed . password is None else unquote ( parsed . password ) # It's None if not exists try : enable_password = parse_qs ( parsed . query ) [ "enable_password" ] [ 0 ] except KeyError : enable_password = None hop_info = HopInfo ( parsed . scheme , parsed . hostname , username , password , parsed . port , enable_password , verify_reachability = verify_reachability ) if hop_info . is_valid ( ) : return hop_info raise InvalidHopInfoError
| 2,998
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/hopinfo.py#L16-L63
|
[
"def",
"bucket",
"(",
"self",
",",
"experiment",
",",
"user_id",
",",
"bucketing_id",
")",
":",
"if",
"not",
"experiment",
":",
"return",
"None",
"# Determine if experiment is in a mutually exclusive group",
"if",
"experiment",
".",
"groupPolicy",
"in",
"GROUP_POLICIES",
":",
"group",
"=",
"self",
".",
"config",
".",
"get_group",
"(",
"experiment",
".",
"groupId",
")",
"if",
"not",
"group",
":",
"return",
"None",
"user_experiment_id",
"=",
"self",
".",
"find_bucket",
"(",
"bucketing_id",
",",
"experiment",
".",
"groupId",
",",
"group",
".",
"trafficAllocation",
")",
"if",
"not",
"user_experiment_id",
":",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is in no experiment.'",
"%",
"user_id",
")",
"return",
"None",
"if",
"user_experiment_id",
"!=",
"experiment",
".",
"id",
":",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is not in experiment \"%s\" of group %s.'",
"%",
"(",
"user_id",
",",
"experiment",
".",
"key",
",",
"experiment",
".",
"groupId",
")",
")",
"return",
"None",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is in experiment %s of group %s.'",
"%",
"(",
"user_id",
",",
"experiment",
".",
"key",
",",
"experiment",
".",
"groupId",
")",
")",
"# Bucket user if not in white-list and in group (if any)",
"variation_id",
"=",
"self",
".",
"find_bucket",
"(",
"bucketing_id",
",",
"experiment",
".",
"id",
",",
"experiment",
".",
"trafficAllocation",
")",
"if",
"variation_id",
":",
"variation",
"=",
"self",
".",
"config",
".",
"get_variation_from_id",
"(",
"experiment",
".",
"key",
",",
"variation_id",
")",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is in variation \"%s\" of experiment %s.'",
"%",
"(",
"user_id",
",",
"variation",
".",
"key",
",",
"experiment",
".",
"key",
")",
")",
"return",
"variation",
"self",
".",
"config",
".",
"logger",
".",
"info",
"(",
"'User \"%s\" is in no variation.'",
"%",
"user_id",
")",
"return",
"None"
] |
Return if host is reachable .
|
def is_reachable ( self ) : if self . verify_reachability and hasattr ( self . verify_reachability , '__call__' ) : return self . verify_reachability ( host = self . hostname , port = self . port ) # assume is reachable if can't verify return True
| 2,999
|
https://github.com/kstaniek/condoor/blob/77c054b29d4e286c1d7aca2c74dff86b805e1fae/condoor/hopinfo.py#L118-L124
|
[
"def",
"xrdb",
"(",
"xrdb_files",
"=",
"None",
")",
":",
"xrdb_files",
"=",
"xrdb_files",
"or",
"[",
"os",
".",
"path",
".",
"join",
"(",
"CACHE_DIR",
",",
"\"colors.Xresources\"",
")",
"]",
"if",
"shutil",
".",
"which",
"(",
"\"xrdb\"",
")",
"and",
"OS",
"!=",
"\"Darwin\"",
":",
"for",
"file",
"in",
"xrdb_files",
":",
"subprocess",
".",
"run",
"(",
"[",
"\"xrdb\"",
",",
"\"-merge\"",
",",
"\"-quiet\"",
",",
"file",
"]",
")"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.